mirror of https://github.com/twirl/The-API-Book.git synced 2024-11-30 08:06:47 +02:00

syntax highlighting

This commit is contained in:
Sergey Konstantinov 2023-08-29 00:00:38 +03:00
parent ff968089ae
commit 066f31a41f
41 changed files with 314 additions and 268 deletions

View File

@ -37,9 +37,7 @@ Thanks [Ilya Subbotin](https://ru.linkedin.com/in/isubbotin) and [Fedor Golubev]
Thanks [Ira Gorelik](https://pixabay.com/users/igorelick-680927/) for the Aqueduct. Thanks [Ira Gorelik](https://pixabay.com/users/igorelick-680927/) for the Aqueduct.
Thanks [ParaType](https://www.paratype.ru/) for PT Sans and PT Serif. Thanks [Friedrich Althausen](http://www.grafikfritze.de/) for Vollkorn, [Christian Robertson](https://twitter.com/cr64) for Roboto Mono, and [ParaType](https://www.paratype.ru/) for PT Sans.
Thanks [Christian Robertson](https://twitter.com/cr64) for Roboto Mono.
Thanks [Knut Sveidqvist and Mermaid Community](https://mermaid.js.org/) for Mermaid. Thanks [Knut Sveidqvist and Mermaid Community](https://mermaid.js.org/) for Mermaid.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,12 +1,18 @@
@font-face { @font-face {
font-family: local-serif; font-family: local-serif;
src: url(../assets/PTSerif-Regular.ttf); src: url(/fonts/Vollkorn-VariableFont_wght.ttf);
} }
@font-face { @font-face {
font-family: local-serif; font-family: local-serif;
src: url(../assets/PTSerif-Bold.ttf); src: url(/fonts/Vollkorn-Italic-VariableFont_wght.ttf);
font-weight: bold; font-style: italic;
}
@font-face {
font-family: local-serif;
src: url(/fonts/Vollkorn-Italic-VariableFont_wght.ttf);
font-style: oblique;
} }
@font-face { @font-face {

View File

@ -6,18 +6,18 @@
"repository": "github.com:twirl/The-API-Book", "repository": "github.com:twirl/The-API-Book",
"version": "2.0.0", "version": "2.0.0",
"devDependencies": { "devDependencies": {
"@jest/globals": "^29.6.1", "@jest/globals": "^29.6.4",
"@twirl/book-builder": "0.0.24", "@twirl/book-builder": "0.0.25",
"@types/jest": "^29.5.3", "@types/jest": "^29.5.4",
"express": "^4.18.2", "express": "^4.18.2",
"jest": "^29.6.1", "jest": "^29.6.4",
"jest-environment-jsdom": "^29.6.1", "jest-environment-jsdom": "^29.6.4",
"jest-mock-extended": "^3.0.4", "jest-mock-extended": "^3.0.5",
"monaco-editor": "^0.40.0", "monaco-editor": "^0.40.0",
"puppeteer": "^20.9.0", "puppeteer": "^20.9.0",
"ts-jest": "^29.1.1", "ts-jest": "^29.1.1",
"ts-loader": "^9.4.4", "ts-loader": "^9.4.4",
"typescript": "^5.1.6", "typescript": "^5.2.2",
"webpack": "^5.88.2" "webpack": "^5.88.2"
}, },
"scripts": { "scripts": {

View File

@ -1,7 +1,9 @@
import { readFileSync, readdirSync, unlinkSync } from 'fs'; import { readFileSync, readdirSync, unlinkSync } from 'fs';
import { resolve as pathResolve } from 'path'; import { resolve as pathResolve } from 'path';
import { init, plugins } from '@twirl/book-builder'; //import { init, plugins } from '@twirl/book-builder';
import templates from '../src/templates.js'; import { init, plugins } from '../../The-Book-Builder/index.js';
import { templates } from '../src/templates.mjs';
import { apiHighlight } from '../src/api-highlight.mjs';
import { buildLanding } from './build-landing.mjs'; import { buildLanding } from './build-landing.mjs';
const flags = process.argv.reduce((flags, v) => { const flags = process.argv.reduce((flags, v) => {
@ -63,7 +65,10 @@ console.log(`Building langs: ${langsToBuild.join(', ')}…`);
plugins.ast.aImg, plugins.ast.aImg,
plugins.ast.imgSrcResolve, plugins.ast.imgSrcResolve,
plugins.ast.highlighter({ plugins.ast.highlighter({
languages: ['javascript', 'typescript'] languages: ['javascript', 'typescript', 'json'],
languageDefinitions: {
json: apiHighlight
}
}), }),
plugins.ast.ref, plugins.ast.ref,
plugins.ast.ghTableFix, plugins.ast.ghTableFix,

src/api-highlight.mjs (new file, 40 lines)
View File

@ -0,0 +1,40 @@
export const apiHighlight = (hljs) => {
    // Quoted strings that do not follow a `":` sequence are
    // treated as attribute (key) names
    const ATTRIBUTE = {
        begin: /(?<!":\s*)"(\\.|[^\\"\r\n])*"/,
        className: 'attr'
    };
    // Structural punctuation
    const PUNCTUATION = {
        match: /{}[[\],:]/,
        className: 'punctuation'
    };
    const LITERALS = ['true', 'false', 'null'];
    const LITERALS_MODE = {
        scope: 'literal',
        beginKeywords: LITERALS.join(' ')
    };
    return {
        name: 'json',
        keywords: {
            // HTTP verbs plus the arrow and ellipsis signs used
            // in the book's request/response notation
            keyword: 'GET POST PUT PATCH DELETE → …',
            literal: LITERALS
        },
        contains: [
            ATTRIBUTE,
            // Remaining quoted strings are value strings
            {
                scope: 'string',
                begin: /(?!^:\s*)"/,
                end: '"'
            },
            // Placeholders such as `{id}` or `<random string>`
            {
                match: /{[\w\d-_]+}|<[\w\d-_\s\\n]+>/,
                className: 'substitution'
            },
            PUNCTUATION,
            LITERALS_MODE,
            hljs.C_NUMBER_MODE,
            hljs.C_LINE_COMMENT_MODE,
            hljs.C_BLOCK_COMMENT_MODE
        ]
    };
};
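
For reference, a definition like this is plugged into highlight.js by registering it under a language name. Below is a minimal sketch of how it could be exercised directly; the import paths and the sample string are illustrative only — in this repository the same definition is instead passed to the book-builder's highlighter plugin via `languageDefinitions`.

```typescript
// A minimal sketch, assuming highlight.js v11+ is installed and
// that this snippet sits next to src/api-highlight.mjs.
import hljs from 'highlight.js/lib/core';
import { apiHighlight } from './api-highlight.mjs';

// Register the custom grammar under the `json` name,
// shadowing the built-in JSON definition
hljs.registerLanguage('json', apiHighlight);

// Highlight a typical snippet in the book's notation
const sample = 'POST /v1/orders\n{ "recipe": "lungo" }';
const { value } = hljs.highlight(sample, { language: 'json' });
console.log(value); // HTML with hljs-keyword / hljs-attr spans
```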

View File

@ -1,37 +1,17 @@
@font-face { @font-face {
font-family: local-serif; font-family: local-serif;
src: url(/fonts/PTSerif-Regular.ttf); src: url(/fonts/Vollkorn-VariableFont_wght.ttf);
} }
@font-face { @font-face {
font-family: local-serif; font-family: local-serif;
src: url(/fonts/PTSerif-Italic.ttf); src: url(/fonts/Vollkorn-Italic-VariableFont_wght.ttf);
font-style: italic; font-style: italic;
} }
@font-face { @font-face {
font-family: local-serif; font-family: local-serif;
src: url(/fonts/PTSerif-Italic.ttf); src: url(/fonts/Vollkorn-Italic-VariableFont_wght.ttf);
font-style: oblique;
}
@font-face {
font-family: local-serif;
src: url(/fonts/PTSerif-Bold.ttf);
font-weight: bold;
}
@font-face {
font-family: local-serif;
src: url(/fonts/PTSerif-BoldItalic.ttf);
font-weight: bold;
font-style: italic;
}
@font-face {
font-family: local-serif;
src: url(/fonts/PTSerif-BoldItalic.ttf);
font-weight: bold;
font-style: oblique; font-style: oblique;
} }
@ -195,7 +175,6 @@ h5 {
} }
body, body,
h5,
h6 { h6 {
font-family: local-serif, serif; font-family: local-serif, serif;
font-size: 14pt; font-size: 14pt;
@ -263,17 +242,43 @@ a.anchor {
text-decoration: none; text-decoration: none;
} }
.hljs-keyword, table {
border-collapse: separate;
line-height: 24px;
margin: 2em 0;
text-align: left;
font-size: 80%;
border-spacing: 0.2em 0;
}
table td,
table th {
border-bottom: 1px solid gray;
padding: 0.5em 0.7em;
}
.hljs-keyword {
color: rgb(207, 34, 46);
}
.hljs-variable { .hljs-variable {
font-weight: bold; color: rgb(149, 56, 0);
} }
.hljs-string { .hljs-string {
color: #2a9292; color: rgb(10, 48, 105);
} }
.hljs-comment { .hljs-comment {
color: #655f6d; color: rgb(110, 119, 129);
}
.hljs-attr {
color: rgb(149, 56, 0);
}
.hljs-substitution {
color: rgb(149, 56, 0);
} }
ul.references, ul.references,
@ -283,12 +288,6 @@ ul.bibliography {
text-align: left; text-align: left;
} }
a.ref sup,
ul.references sup,
ul.bibliography sup {
font-size: 60%;
}
ul.references li, ul.references li,
ul.bibliography li { ul.bibliography li {
margin-bottom: 0.5em; margin-bottom: 0.5em;

View File

@ -8,7 +8,7 @@ Most of the examples of APIs will be provided in the form of JSON-over-HTTP endp
Let's take a look at the following example: Let's take a look at the following example:
``` ```json
// Method description // Method description
POST /v1/bucket/{id}/some-resource⮠ POST /v1/bucket/{id}/some-resource⮠
/{resource_id} /{resource_id}

View File

@ -22,11 +22,11 @@ Each level presents a developer-facing “facet” in our API. While elaborating
Let's assume we have the following interface: Let's assume we have the following interface:
``` ```json
// Returns the lungo recipe // Returns the lungo recipe
GET /v1/recipes/lungo GET /v1/recipes/lungo
``` ```
``` ```json
// Posts an order to make a lungo // Posts an order to make a lungo
// using the specified coffee-machine, // using the specified coffee-machine,
// and returns an order identifier // and returns an order identifier
@ -36,7 +36,7 @@ POST /v1/orders
"recipe": "lungo" "recipe": "lungo"
} }
``` ```
``` ```json
// Returns the order // Returns the order
GET /v1/orders/{id} GET /v1/orders/{id}
``` ```
@ -45,7 +45,7 @@ Let's consider a question: how exactly should developers determine whether the o
* Add a reference beverage volume to the lungo recipe * Add a reference beverage volume to the lungo recipe
* Add the currently prepared volume of the beverage to the order state. * Add the currently prepared volume of the beverage to the order state.
``` ```json
GET /v1/recipes/lungo GET /v1/recipes/lungo
{ {
@ -53,7 +53,7 @@ GET /v1/recipes/lungo
"volume": "100ml" "volume": "100ml"
} }
``` ```
``` ```json
GET /v1/orders/{id} GET /v1/orders/{id}
{ {
@ -74,7 +74,7 @@ Option I: we have a list of possible volumes fixed and introduce bogus recipes l
Option II: we modify an interface, pronouncing volumes stated in recipes are just the default values. We allow requesting different cup volumes while placing an order: Option II: we modify an interface, pronouncing volumes stated in recipes are just the default values. We allow requesting different cup volumes while placing an order:
``` ```json
POST /v1/orders POST /v1/orders
{ {
"coffee_machine_id", "coffee_machine_id",
@ -89,7 +89,7 @@ For those orders with an arbitrary volume requested, a developer will need to ob
So we will get this: So we will get this:
``` ```json
GET /v1/orders/{id} GET /v1/orders/{id}
{ {
@ -124,7 +124,7 @@ In our example with coffee readiness detection, we clearly face the situation wh
A naïve approach to this situation is to design an interim abstraction level as a “connecting link,” which reformulates tasks from one abstraction level into another. For example, introduce a `task` entity like that: A naïve approach to this situation is to design an interim abstraction level as a “connecting link,” which reformulates tasks from one abstraction level into another. For example, introduce a `task` entity like that:
``` ```json
{ {
"volume_requested": "800ml", "volume_requested": "800ml",
@ -146,7 +146,7 @@ A naïve approach to this situation is to design an interim abstraction level as
So an `order` entity will keep links to the recipe and the task, thus not dealing with other abstraction layers directly: So an `order` entity will keep links to the recipe and the task, thus not dealing with other abstraction layers directly:
``` ```json
GET /v1/orders/{id} GET /v1/orders/{id}
{ {
@ -168,7 +168,7 @@ In our example let's assume that we have studied coffee machines' API specs, and
To be more specific, let's assume those two kinds of coffee machines provide the following physical API. To be more specific, let's assume those two kinds of coffee machines provide the following physical API.
* Coffee machines with pre-built programs: * Coffee machines with pre-built programs:
``` ```json
// Returns the list of // Returns the list of
// available programs // available programs
GET /programs GET /programs
@ -180,7 +180,7 @@ To be more specific, let's assume those two kinds of coffee machines provide the
"type": "lungo" "type": "lungo"
} }
``` ```
``` ```json
// Starts an execution // Starts an execution
// of the specified program // of the specified program
// and returns the execution status // and returns the execution status
@ -200,11 +200,11 @@ To be more specific, let's assume those two kinds of coffee machines provide the
"volume": "200ml" "volume": "200ml"
} }
``` ```
``` ```json
// Cancels the current program // Cancels the current program
POST /cancel POST /cancel
``` ```
``` ```json
// Returns the execution status. // Returns the execution status.
// The response format is the same // The response format is the same
// as in the `POST /execute` method // as in the `POST /execute` method
@ -214,7 +214,7 @@ To be more specific, let's assume those two kinds of coffee machines provide the
**NB**: This API violates a number of design principles, starting with a lack of versioning; it's described in such a manner because of two reasons: (1) to demonstrate how to design a more convenient API, (2) in real life, you will indeed get something like that from vendors, and this API is actually quite a sane one. **NB**: This API violates a number of design principles, starting with a lack of versioning; it's described in such a manner because of two reasons: (1) to demonstrate how to design a more convenient API, (2) in real life, you will indeed get something like that from vendors, and this API is actually quite a sane one.
* Coffee machines with built-in functions: * Coffee machines with built-in functions:
``` ```json
// Returns the list of // Returns the list of
// available functions // available functions
GET /functions GET /functions
@ -242,7 +242,7 @@ To be more specific, let's assume those two kinds of coffee machines provide the
] ]
} }
``` ```
``` ```json
// Takes arguments values // Takes arguments values
// and starts executing a function // and starts executing a function
POST /functions POST /functions
@ -254,7 +254,7 @@ To be more specific, let's assume those two kinds of coffee machines provide the
}] }]
} }
``` ```
``` ```json
// Returns the state of the sensors // Returns the state of the sensors
GET /sensors GET /sensors
@ -300,10 +300,10 @@ So we need to introduce two abstraction levels.
What does this mean in a practical sense? Developers will still be creating orders, dealing with high-level entities only: What does this mean in a practical sense? Developers will still be creating orders, dealing with high-level entities only:
``` ```json
POST /v1/orders POST /v1/orders
{ {
"coffee_machin "coffee_machine",
"recipe": "lungo", "recipe": "lungo",
"volume": "800ml" "volume": "800ml"
} }
@ -313,7 +313,7 @@ POST /v1/orders
The `POST /orders` handler checks all order parameters, puts a hold of the corresponding sum on the user's credit card, forms a request to run, and calls the execution level. First, a correct execution program needs to be fetched: The `POST /orders` handler checks all order parameters, puts a hold of the corresponding sum on the user's credit card, forms a request to run, and calls the execution level. First, a correct execution program needs to be fetched:
``` ```json
POST /v1/program-matcher POST /v1/program-matcher
{ "recipe", "coffee-machine" } { "recipe", "coffee-machine" }
@ -322,7 +322,7 @@ POST /v1/program-matcher
Now, after obtaining the correct `program` identifier, the handler runs the program: Now, after obtaining the correct `program` identifier, the handler runs the program:
``` ```json
POST /v1/programs/{id}/run POST /v1/programs/{id}/run
{ {
"order_id", "order_id",
@ -348,7 +348,7 @@ This approach has some benefits, like the possibility to provide different sets
Out of general considerations, the runtime level for the second-kind API will be private, so we are more or less free in implementing it. The easiest solution would be to develop a virtual state machine that creates a “runtime” (i.e., a stateful execution context) to run a program and control its state. Out of general considerations, the runtime level for the second-kind API will be private, so we are more or less free in implementing it. The easiest solution would be to develop a virtual state machine that creates a “runtime” (i.e., a stateful execution context) to run a program and control its state.
``` ```json
POST /v1/runtimes POST /v1/runtimes
{ {
"coffee_machine", "coffee_machine",
@ -359,7 +359,7 @@ POST /v1/runtimes
{ "runtime_id", "state" } { "runtime_id", "state" }
``` ```
The `program` here would look like that: The `program` here would look like that:
``` ```json
{ {
"program_id", "program_id",
"api_type", "api_type",
@ -376,7 +376,7 @@ The `program` here would look like that:
And the `state` like that: And the `state` like that:
``` ```json
{ {
// The `runtime` status: // The `runtime` status:
// * "pending" — awaiting execution // * "pending" — awaiting execution

View File

@ -46,7 +46,7 @@ Obviously, the first step is to offer a choice to the user, to make them point o
If we try writing pseudocode, we will get something like this: If we try writing pseudocode, we will get something like this:
``` ```typescript
// Retrieve all possible recipes // Retrieve all possible recipes
let recipes = let recipes =
api.getRecipes(); api.getRecipes();
@ -79,7 +79,7 @@ The necessity of adding a new endpoint for searching becomes obvious. To design
Then our new interface would look like this: Then our new interface would look like this:
``` ```json
POST /v1/offers/search POST /v1/offers/search
{ {
// optional // optional
@ -114,7 +114,7 @@ Here:
Coming back to the code developers write, it would now look like that: Coming back to the code developers write, it would now look like that:
``` ```typescript
// Searching for offers // Searching for offers
// matching a user's intent // matching a user's intent
let offers = api.search(parameters); let offers = api.search(parameters);
@ -135,7 +135,7 @@ To solve the third problem we could demand that the displayed price be included
One solution is to provide a special identifier to an offer. This identifier must be specified in an order creation request: One solution is to provide a special identifier to an offer. This identifier must be specified in an order creation request:
``` ```json
{ {
"results": [ "results": [
{ {
@ -163,7 +163,7 @@ As an alternative, we could split the endpoints: one for searching, and one for
And one more step towards making developers' lives easier: what would an “invalid price” error look like? And one more step towards making developers' lives easier: what would an “invalid price” error look like?
``` ```json
POST /v1/orders POST /v1/orders
{ "offer_id", … } { "offer_id", … }
→ 409 Conflict → 409 Conflict
@ -187,7 +187,7 @@ The main rule of error interfaces in APIs is that an error response must help a
In our case, the price mismatch error should look like this: In our case, the price mismatch error should look like this:
``` ```json
409 Conflict 409 Conflict
{ {
// Error kind // Error kind
@ -219,7 +219,7 @@ The only possible method of overcoming this law is decomposition. Entities shoul
Let's take a look at the coffee machine search function response in our API. To ensure an adequate UX of the app, quite bulky datasets are required: Let's take a look at the coffee machine search function response in our API. To ensure an adequate UX of the app, quite bulky datasets are required:
``` ```json
{ {
"results": [{ "results": [{
// Coffee machine data // Coffee machine data
@ -271,7 +271,7 @@ In this situation, we need to split this structure into data domains by grouping
Let's group them together: Let's group them together:
``` ```json
{ {
"results": [{ "results": [{
// Place data // Place data
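
Returning to the “invalid price” discussion above: once the error is machine-readable, the partner's code can recover from it without a round trip to a human. A hedged client-side sketch follows; the `api` object shape and the `offer_invalid` reason value are assumptions for illustration:

```typescript
// A sketch of recovering from a price/offer mismatch; the `api`
// shape and the error reason value are assumptions.
declare const api: {
  createOrder(params: {
    offer_id: string;
  }): Promise<{ order_id: string }>;
  search(parameters: unknown): Promise<{
    results: { offer_id: string }[];
  }>;
};

async function placeOrder(
  offerId: string,
  searchParameters: unknown
) {
  try {
    return await api.createOrder({ offer_id: offerId });
  } catch (e: any) {
    // Hypothetical machine-readable reason meaning the offer
    // (and thus the displayed price) is no longer valid
    if (e.reason === 'offer_invalid') {
      // Re-run the search to obtain fresh offers with actual
      // prices and let the user confirm the order again
      const { results } = await api.search(searchParameters);
      return { needs_confirmation: true, offers: results };
    }
    throw e;
  }
}
```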

View File

@ -21,7 +21,7 @@ It is important to understand that you can always introduce your own concepts. F
The entity name should explicitly indicate what the entity does and what side effects to expect when using it. The entity name should explicitly indicate what the entity does and what side effects to expect when using it.
**Bad**: **Bad**:
``` ```typescript
// Cancels an order // Cancels an order
order.canceled = true; order.canceled = true;
``` ```
@ -29,13 +29,13 @@ order.canceled = true;
It is not obvious that a state field might be modified, and that this operation will cancel the order. It is not obvious that a state field might be modified, and that this operation will cancel the order.
**Better**: **Better**:
``` ```typescript
// Cancels an order // Cancels an order
order.cancel(); order.cancel();
``` ```
**Bad**: **Bad**:
``` ```typescript
// Returns aggregated statistics // Returns aggregated statistics
// since the beginning of time // since the beginning of time
orders.getStats() orders.getStats()
@ -43,7 +43,7 @@ orders.getStats()
Even if the operation is non-modifying but computationally expensive, you should explicitly indicate that, especially if clients are charged for computational resource usage. Furthermore, default values should not be set in a way that leads to maximum resource consumption. Even if the operation is non-modifying but computationally expensive, you should explicitly indicate that, especially if clients are charged for computational resource usage. Furthermore, default values should not be set in a way that leads to maximum resource consumption.
**Better**: **Better**:
``` ```typescript
// Calculates and returns // Calculates and returns
// aggregated statistics // aggregated statistics
// for a specified period of time // for a specified period of time
@ -103,17 +103,17 @@ In the 21st century, there's no need to shorten entities' names.
**Better**: `order.getEstimatedDeliveryTime()`. **Better**: `order.getEstimatedDeliveryTime()`.
**Bad**: **Bad**:
``` ```typescript
// Returns a pointer to the first occurrence // Returns a pointer to the first occurrence
// in str1 of any of the characters // in str1 of any of the characters
// that are part of str2 // that are part of str2
strpbrk (str1, str2) strpbrk(str1, str2)
``` ```
Possibly, the author of this API thought that the abbreviation `pbrk` would mean something to readers, but that is clearly mistaken. It is also hard to understand from the signature which string (`str1` or `str2`) represents a character set. Possibly, the author of this API thought that the abbreviation `pbrk` would mean something to readers, but that is clearly mistaken. It is also hard to understand from the signature which string (`str1` or `str2`) represents a character set.
**Better**: **Better**:
``` ```typescript
str_search_for_characters( str_search_for_characters(
str, str,
lookup_character_set lookup_character_set
@ -146,7 +146,7 @@ If an entity name is a polysemantic term itself, which could confuse developers,
**Bad**: **Bad**:
``` ```typescript
// Returns a list of // Returns a list of
// coffee machine builtin functions // coffee machine builtin functions
GET /coffee-machines/{id}/functions GET /coffee-machines/{id}/functions
@ -155,7 +155,7 @@ GET /coffee-machines/{id}/functions
The word “function” is ambiguous. It might refer to built-in functions, but it could also mean “a piece of code,” or a state (machine is functioning). The word “function” is ambiguous. It might refer to built-in functions, but it could also mean “a piece of code,” or a state (machine is functioning).
**Better**: **Better**:
``` ```typescript
GET /v1/coffee-machines/{id}⮠ GET /v1/coffee-machines/{id}⮠
/builtin-functions-list /builtin-functions-list
``` ```
@ -168,7 +168,7 @@ GET /v1/coffee-machines/{id}⮠
**Better**: either `begin_transition` / `end_transition` or `start_transition` / `stop_transition`. **Better**: either `begin_transition` / `end_transition` or `start_transition` / `stop_transition`.
**Bad**: **Bad**:
``` ```typescript
// Find the position of the first occurrence // Find the position of the first occurrence
// of a substring in a string // of a substring in a string
strpos(haystack, needle) strpos(haystack, needle)
@ -195,7 +195,7 @@ Improving these function signatures is left as an exercise for the reader.
It is also worth mentioning that mistakes in using De Morgan's laws[ref De Morgan's laws](https://en.wikipedia.org/wiki/De_Morgan's_laws) are even more common. For example, if you have two flags: It is also worth mentioning that mistakes in using De Morgan's laws[ref De Morgan's laws](https://en.wikipedia.org/wiki/De_Morgan's_laws) are even more common. For example, if you have two flags:
``` ```json
GET /coffee-machines/{id}/stocks GET /coffee-machines/{id}/stocks
{ {
@ -206,7 +206,7 @@ GET /coffee-machines/{id}/stocks
The condition “coffee might be prepared” would look like `has_beans && has_cup` — both flags must be true. However, if you provide the negations of both flags: The condition “coffee might be prepared” would look like `has_beans && has_cup` — both flags must be true. However, if you provide the negations of both flags:
``` ```json
{ {
"beans_absence": false, "beans_absence": false,
"cup_absence": false "cup_absence": false
@ -219,7 +219,7 @@ The condition “coffee might be prepared” would look like `has_beans && has_c
This advice contradicts the previous one, ironically. When developing APIs you frequently need to add a new optional field with a non-empty default value. For example: This advice contradicts the previous one, ironically. When developing APIs you frequently need to add a new optional field with a non-empty default value. For example:
``` ```typescript
const orderParams = { const orderParams = {
contactless_delivery: false contactless_delivery: false
}; };
@ -230,13 +230,11 @@ const order = api.createOrder(
This new `contactless_delivery` option isn't required, but its default value is `true`. A question arises: how should developers discern the explicit intention to disable the option (`false`) from not knowing if it exists (the field isn't set)? They would have to write something like: This new `contactless_delivery` option isn't required, but its default value is `true`. A question arises: how should developers discern the explicit intention to disable the option (`false`) from not knowing if it exists (the field isn't set)? They would have to write something like:
``` ```typescript
if ( const value = orderParams
Type( .contactless_delivery;
orderParams.contactless_delivery if (Type(value) == 'Boolean' &&
) == 'Boolean' && value == false) {
orderParams
.contactless_delivery == false) {
} }
``` ```
@ -246,7 +244,7 @@ This practice makes the code more complicated, and it's quite easy to make mista
If the protocol does not support resetting to default values as a first-class citizen, the universal rule is to make all new Boolean flags false by default. If the protocol does not support resetting to default values as a first-class citizen, the universal rule is to make all new Boolean flags false by default.
**Better** **Better**
``` ```typescript
const orderParams = { const orderParams = {
force_contact_delivery: true force_contact_delivery: true
}; };
@ -258,7 +256,7 @@ const order = api.createOrder(
If a non-Boolean field with a specially treated absence of value is to be introduced, then introduce two fields. If a non-Boolean field with a specially treated absence of value is to be introduced, then introduce two fields.
**Bad**: **Bad**:
``` ```json
// Creates a user // Creates a user
POST /v1/users POST /v1/users
{ … } { … }
@ -278,7 +276,7 @@ PUT /v1/users/{id}
``` ```
**Better** **Better**
``` ```json
POST /v1/users POST /v1/users
{ {
// true — user explicitly cancels // true — user explicitly cancels
@ -338,7 +336,7 @@ As a useful exercise, try modeling the typical lifecycle of a partner's app's ma
If a server processes a request correctly and no exceptional situation occurs, there should be no error. Unfortunately, the antipattern of throwing errors when no results are found is widespread. If a server processes a request correctly and no exceptional situation occurs, there should be no error. Unfortunately, the antipattern of throwing errors when no results are found is widespread.
**Bad** **Bad**
``` ```json
POST /v1/coffee-machines/search POST /v1/coffee-machines/search
{ {
"query": "lungo", "query": "lungo",
@ -354,7 +352,7 @@ POST /v1/coffee-machines/search
The response implies that a client made a mistake. However, in this case, neither the customer nor the developer made any mistakes. The client cannot know beforehand whether lungo is served in this location. The response implies that a client made a mistake. However, in this case, neither the customer nor the developer made any mistakes. The client cannot know beforehand whether lungo is served in this location.
**Better**: **Better**:
``` ```json
POST /v1/coffee-machines/search POST /v1/coffee-machines/search
{ {
"query": "lungo", "query": "lungo",
@ -370,7 +368,7 @@ This rule can be summarized as follows: if an array is the result of the operati
**NB**: This pattern should also be applied in the opposite case. If an array of entities is an optional parameter in the request, the empty array and the absence of the field must be treated differently. Let's consider the example: **NB**: This pattern should also be applied in the opposite case. If an array of entities is an optional parameter in the request, the empty array and the absence of the field must be treated differently. Let's consider the example:
``` ```json
// Finds all coffee recipes // Finds all coffee recipes
// that contain no milk // that contain no milk
POST /v1/recipes/search POST /v1/recipes/search
@ -403,7 +401,7 @@ POST /v1/offers/search
Now let's imagine that the first request returned an empty array of results meaning there are no known recipes that satisfy the condition. Ideally, the developer would have expected this situation and installed a guard to prevent the call to the offer search function in this case. However, we can't be 100% sure they did. If this logic is missing, the application will make the following call: Now let's imagine that the first request returned an empty array of results meaning there are no known recipes that satisfy the condition. Ideally, the developer would have expected this situation and installed a guard to prevent the call to the offer search function in this case. However, we can't be 100% sure they did. If this logic is missing, the application will make the following call:
``` ```json
POST /v1/offers/search POST /v1/offers/search
{ {
"location", "location",
@ -420,7 +418,7 @@ The decision of whether to use an exception or an empty response in the previous
This rule applies not only to empty arrays but to every restriction specified in the contract. “Silently” fixing invalid values rarely makes practical sense. This rule applies not only to empty arrays but to every restriction specified in the contract. “Silently” fixing invalid values rarely makes practical sense.
**Bad**: **Bad**:
``` ```json
POST /v1/offers/search POST /v1/offers/search
{ {
"location": { "location": {
@ -439,7 +437,7 @@ POST /v1/offers/search
As we can see, the developer somehow passed the wrong latitude value (100 degrees). Yes, we can “fix” it by reducing it to the closest valid value, which is 90 degrees, but who benefits from this? The developer will never learn about this mistake, and we doubt that coffee offers in the North Pole vicinity are relevant to users. As we can see, the developer somehow passed the wrong latitude value (100 degrees). Yes, we can “fix” it by reducing it to the closest valid value, which is 90 degrees, but who benefits from this? The developer will never learn about this mistake, and we doubt that coffee offers in the North Pole vicinity are relevant to users.
**Better**: **Better**:
``` ```json
POST /v1/coffee-machines/search POST /v1/coffee-machines/search
{ {
"location": { "location": {
@ -455,7 +453,7 @@ POST /v1/coffee-machines/search
It is also useful to proactively notify partners about behavior that appears to be a mistake: It is also useful to proactively notify partners about behavior that appears to be a mistake:
``` ```json
POST /v1/coffee-machines/search POST /v1/coffee-machines/search
{ {
"location": { "location": {
@ -481,9 +479,9 @@ POST /v1/coffee-machines/search
If it is not possible to add such notices, we can introduce a debug mode or strict mode in which notices are escalated: If it is not possible to add such notices, we can introduce a debug mode or strict mode in which notices are escalated:
``` ```json
POST /v1/coffee-machines/search⮠ POST /v1/coffee-machines/search⮠
strict_mode=true ?strict_mode=true
{ {
"location": { "location": {
"latitude": 0, "latitude": 0,
@ -503,10 +501,10 @@ POST /v1/coffee-machines/search⮠
If the [0, 0] coordinates are not an error, it makes sense to allow for manual bypassing of specific errors: If the [0, 0] coordinates are not an error, it makes sense to allow for manual bypassing of specific errors:
``` ```json
POST /v1/coffee-machines/search⮠ POST /v1/coffee-machines/search⮠
strict_mode=true⮠ ?strict_mode=true⮠
disable_errors=suspicious_coordinates &disable_errors=suspicious_coordinates
``` ```
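
A possible shape of the strict-mode escalation on the client side, assuming the search response carries a `notices` array as suggested above (field names are illustrative):

```typescript
// A sketch of escalating server notices in a debug/strict mode;
// the `notices` field name and shape are assumptions.
interface Notice {
  type: string; // e.g. "suspicious_coordinates"
  localized_message: string;
}
interface SearchResponse {
  results: unknown[];
  notices?: Notice[];
}

const STRICT_MODE = true; // e.g. enabled in development builds

function checkNotices(response: SearchResponse): SearchResponse {
  for (const notice of response.notices ?? []) {
    if (STRICT_MODE) {
      // In strict mode a notice becomes a hard error
      throw new Error(`API notice escalated: ${notice.type}`);
    }
    // Otherwise just surface it to the developer
    console.warn(notice.localized_message);
  }
  return response;
}
```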
##### Default Values Must Make Sense ##### Default Values Must Make Sense
@ -514,7 +512,7 @@ POST /v1/coffee-machines/search⮠
Setting default values is one of the most powerful tools that help avoid verbosity when working with APIs. However, these values should help developers rather than hide their mistakes. Setting default values is one of the most powerful tools that help avoid verbosity when working with APIs. However, these values should help developers rather than hide their mistakes.
**Bad**: **Bad**:
``` ```json
POST /v1/coffee-machines/search POST /v1/coffee-machines/search
{ {
"recipes": ["lungo"] "recipes": ["lungo"]
@ -532,7 +530,7 @@ POST /v1/coffee-machines/search
Formally speaking, having such behavior is feasible: why not have a “default geographical coordinates” concept? However, in reality, such policies of “silently” fixing mistakes lead to absurd situations like “the null island” — the most visited place in the world[ref Hrala, J. Welcome to Null Island, The Most 'Visited' Place on Earth That Doesn't Actually Exist](https://www.sciencealert.com/welcome-to-null-island-the-most-visited-place-that-doesn-t-exist). The more popular an API becomes, the higher the chances that partners will overlook these edge cases. Formally speaking, having such behavior is feasible: why not have a “default geographical coordinates” concept? However, in reality, such policies of “silently” fixing mistakes lead to absurd situations like “the null island” — the most visited place in the world[ref Hrala, J. Welcome to Null Island, The Most 'Visited' Place on Earth That Doesn't Actually Exist](https://www.sciencealert.com/welcome-to-null-island-the-most-visited-place-that-doesn-t-exist). The more popular an API becomes, the higher the chances that partners will overlook these edge cases.
**Better**: **Better**:
``` ```json
POST /v1/coffee-machines/search POST /v1/coffee-machines/search
{ {
"recipes": ["lungo"] "recipes": ["lungo"]
@ -549,7 +547,7 @@ POST /v1/coffee-machines/search
It is not enough to simply validate inputs; providing proper descriptions of errors is also essential. When developers write code, they encounter problems, sometimes quite trivial, such as invalid parameter types or boundary violations. The more convenient the error responses returned by your API, the less time developers will waste struggling with them, and the more comfortable working with the API will be for them. It is not enough to simply validate inputs; providing proper descriptions of errors is also essential. When developers write code, they encounter problems, sometimes quite trivial, such as invalid parameter types or boundary violations. The more convenient the error responses returned by your API, the less time developers will waste struggling with them, and the more comfortable working with the API will be for them.
**Bad**: **Bad**:
``` ```json
POST /v1/coffee-machines/search POST /v1/coffee-machines/search
{ {
"recipes": ["lngo"], "recipes": ["lngo"],
@ -564,7 +562,7 @@ POST /v1/coffee-machines/search
— of course, the mistakes (typo in `"lngo"`, wrong coordinates) are obvious. But the handler checks them anyway, so why not return readable descriptions? — of course, the mistakes (typo in `"lngo"`, wrong coordinates) are obvious. But the handler checks them anyway, so why not return readable descriptions?
**Better**: **Better**:
``` ```json
{ {
"reason": "wrong_parameter_value", "reason": "wrong_parameter_value",
"localized_message": "localized_message":
@ -601,7 +599,7 @@ It is also a good practice to return all detectable errors at once to save devel
##### Return Unresolvable Errors First ##### Return Unresolvable Errors First
``` ```json
POST /v1/orders POST /v1/orders
{ {
"recipe": "lngo", "recipe": "lngo",
@ -630,7 +628,7 @@ POST /v1/orders
If the errors under consideration are resolvable (i.e., the user can take some actions and still get what they need), you should first notify them of those errors that will require more significant state updates. If the errors under consideration are resolvable (i.e., the user can take some actions and still get what they need), you should first notify them of those errors that will require more significant state updates.
**Bad**: **Bad**:
``` ```json
POST /v1/orders POST /v1/orders
{ {
"items": [{ "items": [{
@ -678,7 +676,7 @@ POST /v1/orders
In complex systems, it might happen that resolving one error leads to another one, and vice versa. In complex systems, it might happen that resolving one error leads to another one, and vice versa.
``` ```json
// Create an order // Create an order
// with paid delivery // with paid delivery
POST /v1/orders POST /v1/orders
@ -726,7 +724,7 @@ Let's emphasize that we understand “cache” in the extended sense: which vari
**Bad**: **Bad**:
``` ```json
// Returns lungo prices including // Returns lungo prices including
// delivery to the specified location // delivery to the specified location
GET /price?recipe=lungo⮠ GET /price?recipe=lungo⮠
@ -741,7 +739,7 @@ Two questions arise:
**Better**: you may use standard protocol capabilities to denote cache options, such as the `Cache-Control` header. If you need caching in both temporal and spatial dimensions, you should do something like this: **Better**: you may use standard protocol capabilities to denote cache options, such as the `Cache-Control` header. If you need caching in both temporal and spatial dimensions, you should do something like this:
``` ```json
GET /price?recipe=lungo⮠ GET /price?recipe=lungo⮠
&longitude={longitude}⮠ &longitude={longitude}⮠
&latitude={latitude} &latitude={latitude}
@ -782,14 +780,14 @@ Let us remind the reader that idempotency is the following property: repeated ca
If an endpoint's idempotency can not be naturally assured, explicit idempotency parameters must be added in the form of a token or a resource version. If an endpoint's idempotency can not be naturally assured, explicit idempotency parameters must be added in the form of a token or a resource version.
**Bad**: **Bad**:
``` ```json
// Creates an order // Creates an order
POST /orders POST /orders
``` ```
A second order will be produced if the request is repeated! A second order will be produced if the request is repeated!
**Better**: **Better**:
``` ```json
// Creates an order // Creates an order
POST /v1/orders POST /v1/orders
X-Idempotency-Token: <random string> X-Idempotency-Token: <random string>
@ -798,13 +796,13 @@ X-Idempotency-Token: <random string>
The client must retain the `X-Idempotency-Token` in case of automated endpoint retrying. The server must check whether an order created with this token already exists. The client must retain the `X-Idempotency-Token` in case of automated endpoint retrying. The server must check whether an order created with this token already exists.
**Alternatively**: **Alternatively**:
``` ```json
// Creates order draft // Creates order draft
POST /v1/orders/drafts POST /v1/orders/drafts
{ "draft_id" } { "draft_id" }
``` ```
``` ```json
// Confirms the draft // Confirms the draft
PUT /v1/orders/drafts⮠ PUT /v1/orders/drafts⮠
/{draft_id}/confirmation /{draft_id}/confirmation
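
On the client side, the key point is that the token is generated once per logical operation and reused for every retry. A sketch, assuming a runtime that provides `crypto.randomUUID`:

```typescript
// A sketch of retrying order creation with a stable idempotency
// token so the server can deduplicate repeated requests.
async function createOrderIdempotently(
  order: unknown,
  maxAttempts = 3
) {
  // The same token accompanies all retries of this operation
  const token = crypto.randomUUID();
  for (let attempt = 1; ; attempt++) {
    try {
      return await fetch('/v1/orders', {
        method: 'POST',
        headers: { 'X-Idempotency-Token': token },
        body: JSON.stringify(order)
      });
    } catch (e) {
      // A network error: the request may or may not have been
      // served, so retrying with the same token is safe
      if (attempt >= maxAttempts) throw e;
    }
  }
}
```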
@ -819,7 +817,7 @@ It is also worth mentioning that adding idempotency tokens to naturally idempote
Consider the following example: imagine there is a shared resource, characterized by a revision number, and the client tries to update it. Consider the following example: imagine there is a shared resource, characterized by a revision number, and the client tries to update it.
``` ```json
POST /resource/updates POST /resource/updates
{ {
"resource_revision": 123 "resource_revision": 123
@ -833,7 +831,7 @@ The server can compare request bodies, assuming that identical requests mean ret
Adding the idempotency token (either directly as a random string or indirectly in the form of drafts) solves this problem. Adding the idempotency token (either directly as a random string or indirectly in the form of drafts) solves this problem.
``` ```json
POST /resource/updates POST /resource/updates
X-Idempotency-Token: <token> X-Idempotency-Token: <token>
{ {
@ -847,7 +845,7 @@ X-Idempotency-Token: <token>
Or: Or:
``` ```json
POST /resource/updates POST /resource/updates
X-Idempotency-Token: <token> X-Idempotency-Token: <token>
{ {
@ -882,14 +880,14 @@ And just in case: all APIs must be provided over TLS 1.2 or higher (preferably 1
It is equally important to provide interfaces to partners that minimize potential security problems for them. It is equally important to provide interfaces to partners that minimize potential security problems for them.
**Bad**: **Bad**:
``` ```json
// Allows partners to set // Allows partners to set
// descriptions for their beverages // descriptions for their beverages
PUT /v1/partner-api/{partner-id}⮠ PUT /v1/partner-api/{partner-id}⮠
/recipes/lungo/info /recipes/lungo/info
"<script>alert(document.cookie)</script>" "<script>alert(document.cookie)</script>"
``` ```
``` ```json
// Returns the description // Returns the description
GET /v1/partner-api/{partner-id}⮠ GET /v1/partner-api/{partner-id}⮠
/recipes/lungo/info /recipes/lungo/info
@ -902,7 +900,7 @@ Such an interface directly creates a stored XSS vulnerability that potential att
In these situations, we recommend, first, sanitizing the data if it appears potentially exploitable (e.g. if it is meant to be displayed in the UI and/or is accessible through a direct link). Second, limiting the blast radius so that stored exploits in one partner's data space can't affect other partners. If the functionality of unsafe data input is still required, the risks must be explicitly addressed: In these situations, we recommend, first, sanitizing the data if it appears potentially exploitable (e.g. if it is meant to be displayed in the UI and/or is accessible through a direct link). Second, limiting the blast radius so that stored exploits in one partner's data space can't affect other partners. If the functionality of unsafe data input is still required, the risks must be explicitly addressed:
**Better** (though not perfect): **Better** (though not perfect):
``` ```json
// Allows for setting a potentially // Allows for setting a potentially
// unsafe description for a beverage // unsafe description for a beverage
PUT /v1/partner-api/{partner-id}⮠ PUT /v1/partner-api/{partner-id}⮠
@ -910,7 +908,7 @@ PUT /v1/partner-api/{partner-id}⮠
X-Dangerously-Disable-Sanitizing: true X-Dangerously-Disable-Sanitizing: true
"<script>alert(document.cookie)</script>" "<script>alert(document.cookie)</script>"
``` ```
``` ```json
// Returns the potentially // Returns the potentially
// unsafe description // unsafe description
GET /v1/partner-api/{partner-id}⮠ GET /v1/partner-api/{partner-id}⮠
@ -923,7 +921,7 @@ X-Dangerously-Allow-Raw-Value: true
One important finding is that if you allow executing scripts via the API, always prefer typed input over unsafe input: One important finding is that if you allow executing scripts via the API, always prefer typed input over unsafe input:
**Bad**: **Bad**:
``` ```json
POST /v1/run/sql POST /v1/run/sql
{ {
// Passes the full script // Passes the full script
@ -933,14 +931,14 @@ POST /v1/run/sql
} }
``` ```
**Better**: **Better**:
``` ```json
POST /v1/run/sql POST /v1/run/sql
{ {
// Passes the script template // Passes the script template
"query": "INSERT INTO data (name)⮠ "query": "INSERT INTO data (name)⮠
VALUES (?)", VALUES (?)",
// and the parameters to set // and the parameters to set
values: [ "values": [
"Robert');⮠ "Robert');⮠
DROP TABLE students;--" DROP TABLE students;--"
] ]

View File

@ -4,7 +4,7 @@ Let's summarize the current state of our API study.
##### Offer Search ##### Offer Search
``` ```json
POST /v1/offers/search POST /v1/offers/search
{ {
// optional // optional
@ -55,14 +55,14 @@ POST /v1/offers/search
##### Working with Recipes ##### Working with Recipes
``` ```json
// Returns a list of recipes // Returns a list of recipes
// Cursor parameter is optional // Cursor parameter is optional
GET /v1/recipes?cursor=<cursor> GET /v1/recipes?cursor=<cursor>
{ "recipes", "cursor" } { "recipes", "cursor" }
``` ```
``` ```json
// Returns the recipe by its id // Returns the recipe by its id
GET /v1/recipes/{id} GET /v1/recipes/{id}
@ -75,7 +75,7 @@ GET /v1/recipes/{id}
##### Working with Orders ##### Working with Orders
``` ```json
// Creates an order // Creates an order
POST /v1/orders POST /v1/orders
{ {
@ -91,20 +91,20 @@ POST /v1/orders
{ "order_id" } { "order_id" }
``` ```
``` ```json
// Returns the order by its id // Returns the order by its id
GET /v1/orders/{id} GET /v1/orders/{id}
{ "order_id", "status" } { "order_id", "status" }
``` ```
``` ```json
// Cancels the order // Cancels the order
POST /v1/orders/{id}/cancel POST /v1/orders/{id}/cancel
``` ```
##### Working with Programs ##### Working with Programs
``` ```json
// Returns an identifier of the program // Returns an identifier of the program
// corresponding to specific recipe // corresponding to specific recipe
// on specific coffee-machine // on specific coffee-machine
@ -113,7 +113,7 @@ POST /v1/program-matcher
{ "program_id" } { "program_id" }
``` ```
``` ```json
// Returns the program description // Returns the program description
// by its id // by its id
GET /v1/programs/{id} GET /v1/programs/{id}
@ -134,7 +134,7 @@ GET /v1/programs/{id}
##### Running Programs ##### Running Programs
``` ```json
// Runs the specified program // Runs the specified program
// on the specified coffee-machine // on the specified coffee-machine
// with specific parameters // with specific parameters
@ -152,14 +152,14 @@ POST /v1/programs/{id}/run
{ "program_run_id" } { "program_run_id" }
``` ```
``` ```json
// Stops program running // Stops program running
POST /v1/runs/{id}/cancel POST /v1/runs/{id}/cancel
``` ```
##### Managing Runtimes ##### Managing Runtimes
``` ```json
// Creates a new runtime // Creates a new runtime
POST /v1/runtimes POST /v1/runtimes
{ {
@ -170,7 +170,7 @@ POST /v1/runtimes
{ "runtime_id", "state" } { "runtime_id", "state" }
``` ```
``` ```json
// Returns the state // Returns the state
// of the specified runtime // of the specified runtime
GET /v1/runtimes/{runtime_id}/state GET /v1/runtimes/{runtime_id}/state
@ -183,7 +183,7 @@ GET /v1/runtimes/{runtime_id}/state
"variables" "variables"
} }
``` ```
``` ```json
// Terminates the runtime // Terminates the runtime
POST /v1/runtimes/{id}/terminate POST /v1/runtimes/{id}/terminate
``` ```

View File

@ -6,7 +6,7 @@ Let's proceed to the technical problems that API developers face. We begin with
2. Because of network issues, the request propagates to the server very slowly, and the client gets a timeout 2. Because of network issues, the request propagates to the server very slowly, and the client gets a timeout
* Therefore, the client does not know whether the query was served or not. * Therefore, the client does not know whether the query was served or not.
3. The client requests the current state of the system and gets an empty response as the initial request still hasn't reached the server: 3. The client requests the current state of the system and gets an empty response as the initial request still hasn't reached the server:
``` ```typescript
const pendingOrders = await const pendingOrders = await
api.getOngoingOrders(); // → [] api.getOngoingOrders(); // → []
``` ```
@ -23,7 +23,7 @@ There are two main approaches to solving this problem: the pessimistic one (impl
The first approach is to literally implement standard synchronization primitives at the API level. Like this, for example: The first approach is to literally implement standard synchronization primitives at the API level. Like this, for example:
``` ```typescript
let lock; let lock;
try { try {
// Capture the exclusive // Capture the exclusive
@ -59,7 +59,7 @@ Rather unsurprisingly, this approach sees very rare use in distributed client-se
A less implementation-heavy approach is to develop an optimistic concurrency control[ref Optimistic concurrency control](https://en.wikipedia.org/wiki/Optimistic_concurrency_control) system, i.e., to require clients to pass a flag proving they know the actual state of a shared resource. A less implementation-heavy approach is to develop an optimistic concurrency control[ref Optimistic concurrency control](https://en.wikipedia.org/wiki/Optimistic_concurrency_control) system, i.e., to require clients to pass a flag proving they know the actual state of a shared resource.
``` ```typescript
// Retrieve the state // Retrieve the state
const orderState = const orderState =
await api.getOrderState(); await api.getOrderState();
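
Spelled out a bit further, the optimistic flow is: read the state (and its version), pass the version along with the mutation, and retry from the read step if the server reports a conflict. A sketch with an assumed `api` shape and conflict reason value:

```typescript
// A sketch of optimistic concurrency control; the `api` shape
// and the conflict reason value are assumptions.
declare const api: {
  getOrderState(): Promise<{ latestVersion: number }>;
  createOrder(
    version: number,
    params: unknown
  ): Promise<{ order_id: string }>;
};

async function createOrderOptimistically(params: unknown) {
  while (true) {
    // Read the current state and remember its version
    const { latestVersion } = await api.getOrderState();
    try {
      // The version proves we know the actual resource state
      return await api.createOrder(latestVersion, params);
    } catch (e: any) {
      // Hypothetical reason meaning somebody changed the state
      // concurrently; re-read and try again
      if (e.reason !== 'version_conflict') throw e;
    }
  }
}
```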

View File

@ -2,7 +2,7 @@
The approach described in the previous chapter is in fact a trade-off: the API performance issues are traded for “normal” (i.e., expected) background errors that happen while working with the API. This is achieved by isolating the component responsible for controlling concurrency and only exposing read-only tokens in the public API. Still, the achievable throughput of the API is limited, and the only way of scaling it up is removing the strict consistency from the external API and thus allowing reading system state from read-only replicas: The approach described in the previous chapter is in fact a trade-off: the API performance issues are traded for “normal” (i.e., expected) background errors that happen while working with the API. This is achieved by isolating the component responsible for controlling concurrency and only exposing read-only tokens in the public API. Still, the achievable throughput of the API is limited, and the only way of scaling it up is removing the strict consistency from the external API and thus allowing reading system state from read-only replicas:
``` ```typescript
// Reading the state, // Reading the state,
// possibly from a replica // possibly from a replica
const orderState = const orderState =
@ -26,7 +26,7 @@ As orders are created much more rarely than read, we might significantly increas
Choosing weak consistency instead of a strict one, however, brings some disadvantages. For instance, we might require partners to wait until they get the actual resource state to make changes — but it is quite unobvious to partners (and actually inconvenient) that they must be prepared to wait for the changes they made themselves to propagate. Choosing weak consistency instead of a strict one, however, brings some disadvantages. For instance, we might require partners to wait until they get the actual resource state to make changes — but it is quite unobvious to partners (and actually inconvenient) that they must be prepared to wait for the changes they made themselves to propagate.
``` ```typescript
// Creates an order // Creates an order
const api = await api const api = await api
.createOrder(…) .createOrder(…)
@ -40,7 +40,7 @@ If strict consistency is not guaranteed, the second call might easily return an
An important pattern that helps in this situation is implementing the “read-your-writes[ref Consistency Model. Read-Your-Writes Consistency](https://en.wikipedia.org/wiki/Consistency_model#Read-your-writes_consistency)” model, i.e., guaranteeing that clients observe the changes they have just made. The consistency might be lifted to the read-your-writes level by making clients pass some token that describes the last changes known to the client. An important pattern that helps in this situation is implementing the “read-your-writes[ref Consistency Model. Read-Your-Writes Consistency](https://en.wikipedia.org/wiki/Consistency_model#Read-your-writes_consistency)” model, i.e., guaranteeing that clients observe the changes they have just made. The consistency might be lifted to the read-your-writes level by making clients pass some token that describes the last changes known to the client.
``` ```typescript
const order = await api const order = await api
.createOrder(…); .createOrder(…);
const pendingOrders = await api. const pendingOrders = await api.
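
A sketch of what the read-your-writes contract might look like from the client's perspective (the `last_known_order_id` parameter name is an assumption):

```typescript
// A sketch of the read-your-writes pattern: the client passes a
// token describing the latest change it knows about, and the
// server must return data at least that fresh.
declare const api: {
  createOrder(params: unknown): Promise<{ id: string }>;
  getOngoingOrders(params?: {
    last_known_order_id?: string;
  }): Promise<unknown[]>;
};

async function createAndList(orderParams: unknown) {
  const order = await api.createOrder(orderParams);
  // The freshly created order is guaranteed to be visible here,
  // even if the read is served from a lagging replica
  return await api.getOngoingOrders({
    last_known_order_id: order.id
  });
}
```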

View File

@ -6,7 +6,7 @@ We remember that this probability is equal to the ratio of time periods: getting
Our usage scenario looks like this: Our usage scenario looks like this:
``` ```typescript
const pendingOrders = await api. const pendingOrders = await api.
getOngoingOrders(); getOngoingOrders();
if (pendingOrders.length == 0) { if (pendingOrders.length == 0) {
@ -32,7 +32,7 @@ However, what we could do to improve this timing remains unclear. Creating an or
What could help us here is the asynchronous operations pattern. If our goal is to reduce the collision rate, there is no need to wait until the order is *actually* created as we need to quickly propagate the knowledge that the order is *accepted for creation*. We might employ the following technique: create *a task for order creation* and return its identifier, not the order itself. What could help us here is the asynchronous operations pattern. If our goal is to reduce the collision rate, there is no need to wait until the order is *actually* created as we need to quickly propagate the knowledge that the order is *accepted for creation*. We might employ the following technique: create *a task for order creation* and return its identifier, not the order itself.
``` ```typescript
const pendingOrders = await api. const pendingOrders = await api.
getOngoingOrders(); getOngoingOrders();
if (pendingOrders.length == 0) { if (pendingOrders.length == 0) {
@ -79,7 +79,7 @@ However, we must stress that excessive asynchronicity, though appealing to API d
Therefore, despite all the advantages of the approach, we tend to recommend applying this pattern only to those cases when they are really needed (as in the example we started with when we needed to lower the probability of collisions) and having separate queues for each case. The perfect task queue solution is the one that doesn't look like a task queue. For example, we might simply make the “order creation task is accepted and awaits execution” state a separate order status and make its identifier the future identifier of the order itself: Therefore, despite all the advantages of the approach, we tend to recommend applying this pattern only to those cases when they are really needed (as in the example we started with when we needed to lower the probability of collisions) and having separate queues for each case. The perfect task queue solution is the one that doesn't look like a task queue. For example, we might simply make the “order creation task is accepted and awaits execution” state a separate order status and make its identifier the future identifier of the order itself:
``` ```typescript
const pendingOrders = await api. const pendingOrders = await api.
getOngoingOrders(); getOngoingOrders();
if (pendingOrders.length == 0) { if (pendingOrders.length == 0) {
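
A sketch of the task-based flow from the client's point of view (method names and the task status values are illustrative):

```typescript
// A sketch of the asynchronous operations pattern: creation
// returns a task identifier, and the client polls the task.
declare const api: {
  createOrderCreationTask(
    params: unknown
  ): Promise<{ task_id: string }>;
  getTask(taskId: string): Promise<
    | { status: 'pending' }
    | { status: 'done'; order_id: string }
    | { status: 'failed'; reason: string }
  >;
};

const sleep = (ms: number) =>
  new Promise((resolve) => setTimeout(resolve, ms));

async function createOrderAsync(params: unknown) {
  // The task is accepted almost instantly, which is what lowers
  // the probability of collisions; the heavy work happens later
  const { task_id } = await api.createOrderCreationTask(params);
  while (true) {
    const task = await api.getTask(task_id);
    if (task.status === 'done') return task.order_id;
    if (task.status === 'failed') throw new Error(task.reason);
    await sleep(1000);
  }
}
```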

View File

@ -2,7 +2,7 @@
In the previous chapter, we concluded with the following interface that allows minimizing collisions while creating orders: In the previous chapter, we concluded with the following interface that allows minimizing collisions while creating orders:
``` ```typescript
const pendingOrders = await api const pendingOrders = await api
.getOngoingOrders(); .getOngoingOrders();
@ -18,7 +18,7 @@ However, an attentive reader might notice that this interface violates the recom
Fixing this problem is rather simple: we might introduce a limit for the items returned in the response, and allow passing filtering and sorting parameters, like this: Fixing this problem is rather simple: we might introduce a limit for the items returned in the response, and allow passing filtering and sorting parameters, like this:
``` ```typescript
api.getOngoingOrders({ api.getOngoingOrders({
// The `limit` parameter // The `limit` parameter
// is optional, but there is // is optional, but there is
@ -37,7 +37,7 @@ However, introducing limits leads to another issue: if the number of items to re
The standard approach is to add an `offset` parameter or a page number: The standard approach is to add an `offset` parameter or a page number:
``` ```typescript
api.getOngoingOrders({ api.getOngoingOrders({
// The `limit` parameter // The `limit` parameter
// is optional, but there is // is optional, but there is
@ -51,7 +51,7 @@ api.getOngoingOrders({
With this approach, however, other problems arise. Let us imagine three orders are being processed on behalf of the user: With this approach, however, other problems arise. Let us imagine three orders are being processed on behalf of the user:
``` ```json
[{ [{
"id": 3, "id": 3,
"created_iso_time": "2022-12-22T15:35", "created_iso_time": "2022-12-22T15:35",
@ -69,7 +69,7 @@ With this approach, however, other problems arise. Let us imagine three orders a
A partner application requested the first page of the list: A partner application requested the first page of the list:
``` ```typescript
api.getOrders({ api.getOrders({
"limit": 2, "limit": 2,
"parameters": { "parameters": {
@ -91,7 +91,7 @@ api.getOrders({
Then the application requests the second page (`"limit": 2, "offset": 2`) and expects to retrieve the order with `"id": 1`. However, during the interval between the requests, another order, with `"id": 4`, appeared. Then the application requests the second page (`"limit": 2, "offset": 2`) and expects to retrieve the order with `"id": 1`. However, during the interval between the requests, another order, with `"id": 4`, appeared.
``` ```json
[{ [{
"id": 4, "id": 4,
"created_iso_time": "2022-12-22T15:36", "created_iso_time": "2022-12-22T15:36",
@ -113,7 +113,7 @@ Then the application requests the second page (`"limit": 2, "offset": 2`) and ex
Then upon requesting the second page of the order list, instead of getting exactly one order with `"id": 1`, the application will get the `"id": 2` order once again: Then upon requesting the second page of the order list, instead of getting exactly one order with `"id": 1`, the application will get the `"id": 2` order once again:
``` ```typescript
api.getOrders({ api.getOrders({
"limit": 2, "limit": 2,
"offset": 2 "offset": 2
@ -133,7 +133,7 @@ These permutations are rather inconvenient in user interfaces (if let's say, the
The problem might easily become even more sophisticated. For example, if we add sorting by two fields, creation date and order status: The problem might easily become even more sophisticated. For example, if we add sorting by two fields, creation date and order status:
``` ```typescript
api.getOrders({ api.getOrders({
"limit": 2, "limit": 2,
"parameters": { "parameters": {
@ -172,9 +172,9 @@ The easiest case is with immutable lists, i.e., when the set of items never chan
The case of a list with immutable items and the operation of adding new ones is more typical. Most notably, we talk about event queues containing, for example, new messages or notifications. Let's imagine there is an endpoint in our coffee API that allows partners to retrieve the history of offers: The case of a list with immutable items and the operation of adding new ones is more typical. Most notably, we talk about event queues containing, for example, new messages or notifications. Let's imagine there is an endpoint in our coffee API that allows partners to retrieve the history of offers:
``` ```json
GET /v1/partners/{id}/offers/history⮠ GET /v1/partners/{id}/offers/history⮠
limit=<limit> ?limit=<limit>
{ {
"offer_history": [{ "offer_history": [{
@ -210,15 +210,15 @@ To solve this issue, we need to rely not on an attribute that constantly changes
If the data storage we use for keeping list items offers the possibility of using monotonically increased identifiers (which practically means two things: (1) the DB supports auto-incremental columns and (2) there are insert locks that guarantee inserts are performed sequentially), then using the monotonous identifier is the most convenient way of organizing list traversal: If the data storage we use for keeping list items offers the possibility of using monotonically increased identifiers (which practically means two things: (1) the DB supports auto-incremental columns and (2) there are insert locks that guarantee inserts are performed sequentially), then using the monotonous identifier is the most convenient way of organizing list traversal:
``` ```json
// Retrieve the records that precede // Retrieve the records that precede
// the one with the given id // the one with the given id
GET /v1/partners/{id}/offers/history⮠ GET /v1/partners/{id}/offers/history⮠
newer_than=<item_id>&limit=<limit> ?newer_than=<item_id>&limit=<limit>
// Retrieve the records that follow // Retrieve the records that follow
// the one with the given id // the one with the given id
GET /v1/partners/{id}/offers/history⮠ GET /v1/partners/{id}/offers/history⮠
older_than=<item_id>&limit=<limit> ?older_than=<item_id>&limit=<limit>
``` ```
The first request format allows for implementing the first scenario, i.e., retrieving the fresh portion of the data. Conversely, the second format makes it possible to consistently iterate over the data to fulfill the second scenario. Importantly, the second request is cacheable as the tail of the list never changes. The first request format allows for implementing the first scenario, i.e., retrieving the fresh portion of the data. Conversely, the second format makes it possible to consistently iterate over the data to fulfill the second scenario. Importantly, the second request is cacheable as the tail of the list never changes.
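As a purely illustrative aside: a client consuming this contract could iterate over the full history with a loop over the `older_than` parameter. The sketch below assumes a global `fetch`, a hypothetical response shape of `{ offer_history: [{ id }] }`, and an empty array signaling the end of the list — none of these details are prescribed by the text above.

```typescript
// A sketch of consistent tail traversal via `older_than`.
// The response shape and the termination condition are assumptions.
type OfferEvent = { id: string };
type HistoryResponse = { offer_history: OfferEvent[] };

async function* traverseOfferHistory(
  partnerId: string,
  limit = 100
): AsyncGenerator<OfferEvent> {
  let olderThan: string | undefined;
  while (true) {
    const query = new URLSearchParams({ limit: String(limit) });
    if (olderThan !== undefined) {
      query.set('older_than', olderThan);
    }
    const response = await fetch(
      `/v1/partners/${partnerId}/offers/history?${query}`
    );
    if (!response.ok) {
      throw new Error(`Unexpected status: ${response.status}`);
    }
    const { offer_history } =
      (await response.json()) as HistoryResponse;
    if (offer_history.length === 0) {
      return;
    }
    yield* offer_history;
    // The last item of the page anchors the next request
    olderThan = offer_history[offer_history.length - 1].id;
  }
}
```

Since the tail of the list never changes, every page this loop retrieves may additionally be cached on the client side.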
@ -233,10 +233,10 @@ Another possible anchor to rely on is the record creation date. However, this ap
Often, the interfaces of traversing data through stating boundaries are generalized by introducing the concept of a “cursor”: Often, the interfaces of traversing data through stating boundaries are generalized by introducing the concept of a “cursor”:
``` ```json
// Initiate list traversal // Initiate list traversal
POST /v1/partners/{id}/offers/history⮠ POST /v1/partners/{id}/offers/history⮠
search /search
{ {
"order_by": [{ "order_by": [{
"field": "created", "field": "created",
@ -249,7 +249,7 @@ POST /v1/partners/{id}/offers/history⮠
} }
``` ```
``` ```json
// Get the next data chunk // Get the next data chunk
GET /v1/partners/{id}/offers/history⮠ GET /v1/partners/{id}/offers/history⮠
?cursor=TmluZSBQcmluY2VzIGluIEFtYmVy⮠ ?cursor=TmluZSBQcmluY2VzIGluIEFtYmVy⮠
@ -266,10 +266,10 @@ A *cursor* might be just an encoded identifier of the last record or it might co
The cursor-based approach also allows adding new filters and sorting directions in a backward-compatible manner — provided you organize the data in a way that cursor-based traversal will continue working. The cursor-based approach also allows adding new filters and sorting directions in a backward-compatible manner — provided you organize the data in a way that cursor-based traversal will continue working.
``` ```json
// Initialize list traversal // Initialize list traversal
POST /v1/partners/{id}/offers/history⮠ POST /v1/partners/{id}/offers/history⮠
search /search
{ {
// Add a filter by the recipe // Add a filter by the recipe
"filter": { "filter": {
@ -305,7 +305,7 @@ Unfortunately, it is not universally possible to organize the data in a way that
Sometimes, the task can be *reduced* to an immutable list if we create a snapshot of the data. In many cases, it is actually more convenient for partners to work with a snapshot that is current for a specific date as it eliminates the necessity of taking ongoing changes into account. This approach works well with accessing “cold” data storage by downloading chunks of data and putting them into “hot” storage upon request. Sometimes, the task can be *reduced* to an immutable list if we create a snapshot of the data. In many cases, it is actually more convenient for partners to work with a snapshot that is current for a specific date as it eliminates the necessity of taking ongoing changes into account. This approach works well with accessing “cold” data storage by downloading chunks of data and putting them into “hot” storage upon request.
``` ```json
POST /v1/orders/archive/retrieve POST /v1/orders/archive/retrieve
{ {
"created_iso_date": { "created_iso_date": {
@ -330,7 +330,7 @@ The inverse approach to the problem is to never provide more than one page of da
If none of the approaches above works, our only solution is changing the subject area itself. If we can't consistently enumerate list elements, we need to find a facet of the same data that we *can* enumerate. In our example with the ongoing orders we might make an ordered list of the *events* of creating new orders: If none of the approaches above works, our only solution is changing the subject area itself. If we can't consistently enumerate list elements, we need to find a facet of the same data that we *can* enumerate. In our example with the ongoing orders we might make an ordered list of the *events* of creating new orders:
``` ```json
// Retrieve all the events older // Retrieve all the events older
// than the one with the given id // than the one with the given id
GET /v1/orders/created-history⮠ GET /v1/orders/created-history⮠


@ -2,9 +2,9 @@
In the previous chapter, we discussed the following scenario: a partner receives information about new events occurring in the system by periodically requesting an endpoint that supports retrieving ordered lists. In the previous chapter, we discussed the following scenario: a partner receives information about new events occurring in the system by periodically requesting an endpoint that supports retrieving ordered lists.
``` ```json
GET /v1/orders/created-history⮠ GET /v1/orders/created-history⮠
older_than=<item_id>&limit=<limit> ?older_than=<item_id>&limit=<limit>
{ {
"orders_created_events": [{ "orders_created_events": [{


@ -6,7 +6,7 @@ One of the vexing restrictions of almost every technology mentioned in the previ
On the example of our coffee API: On the example of our coffee API:
``` ```json
// Option #1: the message // Option #1: the message
// contains all the order data // contains all the order data
POST /partner/webhook POST /partner/webhook
@ -24,7 +24,7 @@ Host: partners.host
} }
} }
``` ```
``` ```json
// Option #2: the message body // Option #2: the message body
// contains only the notification // contains only the notification
// of the status change // of the status change
@ -50,7 +50,7 @@ GET /v1/orders/{id}
{ /* full data regarding { /* full data regarding
the order */ } the order */ }
``` ```
``` ```json
// Option #3: the API vendor // Option #3: the API vendor
// notifies partners that // notifies partners that
// several orders await their // several orders await their
@ -85,7 +85,7 @@ Which option to select depends on the subject area (and on the allowed message s
The technique of sending only essential data in the notification has one important disadvantage, apart from more complicated data flows and increased request rate. With option \#1 implemented (i.e., the message contains all the data), we might assume that returning a success response by the subscriber is equivalent to successfully processing the state change by the partner (although it's not guaranteed if the partner uses asynchronous techniques). With options \#2 and \#3, this is certainly not the case: the partner must carry out additional actions (starting from retrieving the actual order state) to fully process the message. This implies that two separate statuses might be needed: “message received” and “message processed.” Ideally, the latter should follow the logic of the API work cycle, i.e., the partner should carry out some follow-up action upon processing the event, and this action might be treated as the “message processed” signal. In our coffee example, we can expect that the partner will either accept or reject an order after receiving the “new order” message. Then the full message processing flow will look like this: The technique of sending only essential data in the notification has one important disadvantage, apart from more complicated data flows and increased request rate. With option \#1 implemented (i.e., the message contains all the data), we might assume that returning a success response by the subscriber is equivalent to successfully processing the state change by the partner (although it's not guaranteed if the partner uses asynchronous techniques). With options \#2 and \#3, this is certainly not the case: the partner must carry out additional actions (starting from retrieving the actual order state) to fully process the message. This implies that two separate statuses might be needed: “message received” and “message processed.” Ideally, the latter should follow the logic of the API work cycle, i.e., the partner should carry out some follow-up action upon processing the event, and this action might be treated as the “message processed” signal. In our coffee example, we can expect that the partner will either accept or reject an order after receiving the “new order” message. Then the full message processing flow will look like this:
``` ```json
// The API vendor // The API vendor
// notifies the partner that // notifies the partner that
// several orders await their // several orders await their
@ -98,7 +98,7 @@ Host: partners.host
<the number of pending orders> <the number of pending orders>
} }
``` ```
``` ```json
// In response, the partner // In response, the partner
// retrieves the list of // retrieves the list of
// pending orders // pending orders
@ -109,7 +109,7 @@ GET /v1/orders/pending
"cursor" "cursor"
} }
``` ```
``` ```json
// After the orders are processed, // After the orders are processed,
// the partners notify about this // the partners notify about this
// by calling the specific API // by calling the specific API


@ -4,7 +4,7 @@ Let's transition from *webhooks* back to developing direct-call APIs. The design
Let's consider a scenario where the partner notifies us about status changes that have occurred for two orders: Let's consider a scenario where the partner notifies us about status changes that have occurred for two orders:
``` ```json
POST /v1/orders/bulk-status-change POST /v1/orders/bulk-status-change
{ {
"status_changes": [{ "status_changes": [{
@ -41,7 +41,7 @@ However, if we consider the situation from the partner's perspective, we realize
Now, let's consider a scenario where the partner receives an error from the API endpoint during the third step. What would developers do in such a situation? Most probably, one of the following solutions might be implemented in the partner's code: Now, let's consider a scenario where the partner receives an error from the API endpoint during the third step. What would developers do in such a situation? Most probably, one of the following solutions might be implemented in the partner's code:
1. Unconditional retry of the request: 1. Unconditional retry of the request:
``` ```typescript
// Retrieve the ongoing orders // Retrieve the ongoing orders
const pendingOrders = await api const pendingOrders = await api
.getPendingOrders(); .getPendingOrders();
@ -89,7 +89,7 @@ Now, let's consider a scenario where the partner receives an error from the API
**NB**: In the code sample above, we provide the “right” retry policy with exponentially increasing delays and a total limit on the number of retries, as we recommended earlier in the “[Describing Final Interfaces](#api-design-describing-interfaces)” chapter. However, be warned that real partners' code may frequently lack such precautions. For the sake of readability, we will skip this bulky construct in the following code samples. **NB**: In the code sample above, we provide the “right” retry policy with exponentially increasing delays and a total limit on the number of retries, as we recommended earlier in the “[Describing Final Interfaces](#api-design-describing-interfaces)” chapter. However, be warned that real partners' code may frequently lack such precautions. For the sake of readability, we will skip this bulky construct in the following code samples.
2. Retrying only failed sub-requests: 2. Retrying only failed sub-requests:
``` ```typescript
const pendingOrders = await api const pendingOrders = await api
.getPendingOrders(); .getPendingOrders();
let changes = let changes =
@ -130,7 +130,7 @@ Now, let's consider a scenario where the partner receives an error from the API
``` ```
3. Restarting the entire pipeline. In this case, the partner retrieves the list of pending orders anew and forms a new bulk change request: 3. Restarting the entire pipeline. In this case, the partner retrieves the list of pending orders anew and forms a new bulk change request:
``` ```typescript
do { do {
const pendingOrders = await api const pendingOrders = await api
.getPendingOrders(); .getPendingOrders();
@ -155,7 +155,7 @@ Now, let's introduce another crucial condition to the problem statement: imagine
This leads us to a seemingly paradoxical conclusion: in order to ensure the partners' code continues to function *somehow* and to allow them time to address their invalid sub-requests we should adopt the least strict non-idempotent non-atomic approach to the design of the bulk state change endpoint. However, we consider this conclusion to be incorrect: the “zoo” of possible client and server implementations and the associated problems demonstrate that *bulk state change endpoints are inherently undesirable*. Such endpoints require maintaining an additional layer of logic in both server and client code, and the logic itself is quite non-obvious. The non-atomic non-idempotent bulk state changes will very soon result in nasty issues: This leads us to a seemingly paradoxical conclusion: in order to ensure the partners' code continues to function *somehow* and to allow them time to address their invalid sub-requests we should adopt the least strict non-idempotent non-atomic approach to the design of the bulk state change endpoint. However, we consider this conclusion to be incorrect: the “zoo” of possible client and server implementations and the associated problems demonstrate that *bulk state change endpoints are inherently undesirable*. Such endpoints require maintaining an additional layer of logic in both server and client code, and the logic itself is quite non-obvious. The non-atomic non-idempotent bulk state changes will very soon result in nasty issues:
``` ```json
// A partner issues a refund // A partner issues a refund
// and cancels the order // and cancels the order
POST /v1/bulk-status-change POST /v1/bulk-status-change
@ -195,7 +195,7 @@ So, our recommendations for bulk modifying endpoints are:
One of the approaches that helps minimize potential issues is developing a “mixed” endpoint, in which the operations that can affect each other are grouped: One of the approaches that helps minimize potential issues is developing a “mixed” endpoint, in which the operations that can affect each other are grouped:
``` ```json
POST /v1/bulk-status-change POST /v1/bulk-status-change
{ {
"changes": [{ "changes": [{

View File

@ -2,7 +2,7 @@
The case of partial application of the list of changes described in the previous chapter naturally leads us to the next typical API design problem. What if the operation involves a low-level overwriting of several data fields rather than an atomic idempotent procedure (as in the case of changing the order status)? Let's take a look at the following example: The case of partial application of the list of changes described in the previous chapter naturally leads us to the next typical API design problem. What if the operation involves a low-level overwriting of several data fields rather than an atomic idempotent procedure (as in the case of changing the order status)? Let's take a look at the following example:
``` ```json
// Creates an order // Creates an order
// consisting of two beverages // consisting of two beverages
POST /v1/orders/ POST /v1/orders/
@ -20,7 +20,7 @@ X-Idempotency-Token: <token>
{ "order_id" } { "order_id" }
``` ```
``` ```json
// Partially updates the order // Partially updates the order
// by changing the volume // by changing the volume
// of the second beverage // of the second beverage
@ -58,7 +58,7 @@ To avoid these issues, developers sometimes implement a **naïve solution**:
A full example of an API implementing the naïve approach would look like this: A full example of an API implementing the naïve approach would look like this:
``` ```json
// Partially rewrites the order: // Partially rewrites the order:
// * Resets the delivery address // * Resets the delivery address
// to the default values // to the default values
@ -96,17 +96,17 @@ However, upon closer examination all these conclusions seem less viable:
The solution could be enhanced by introducing explicit control sequences instead of relying on “magical” values and adding meta settings for the operation (such as a field name filter as it's implemented in gRPC over Protobuf[ref Protocol Buffers. Field Masks in Update Operations](https://protobuf.dev/reference/protobuf/google.protobuf/#field-masks-updates)). Here's an example: The solution could be enhanced by introducing explicit control sequences instead of relying on “magical” values and adding meta settings for the operation (such as a field name filter as it's implemented in gRPC over Protobuf[ref Protocol Buffers. Field Masks in Update Operations](https://protobuf.dev/reference/protobuf/google.protobuf/#field-masks-updates)). Here's an example:
``` ```json
// Partially rewrites the order: // Partially rewrites the order:
// * Resets the delivery address // * Resets the delivery address
// to the default values // to the default values
// * Leaves the first beverage // * Leaves the first beverage
// intact // intact
// * Removes the second beverage. // * Removes the second beverage.
PATCH /v1/orders/{id}? PATCH /v1/orders/{id}⮠
// A meta filter: which fields // A meta filter: which fields
// are allowed to be modified // are allowed to be modified
field_mask=delivery_address,items ?field_mask=delivery_address,items
{ {
// “Special” value #1: // “Special” value #1:
// reset the field // reset the field
@ -135,7 +135,7 @@ Given that the format becomes more complex and less intuitively understandable,
A **more consistent solution** is to split an endpoint into several idempotent sub-endpoints, each having its own independent identifier and/or address (which is usually enough to ensure the transitivity of independent operations). This approach aligns well with the decomposition principle we discussed in the “[Isolating Responsibility Areas](#api-design-isolating-responsibility)” chapter. A **more consistent solution** is to split an endpoint into several idempotent sub-endpoints, each having its own independent identifier and/or address (which is usually enough to ensure the transitivity of independent operations). This approach aligns well with the decomposition principle we discussed in the “[Isolating Responsibility Areas](#api-design-isolating-responsibility)” chapter.
``` ```json
// Creates an order // Creates an order
// comprising two beverages // comprising two beverages
POST /v1/orders/ POST /v1/orders/
@ -164,7 +164,7 @@ POST /v1/orders/
} }
``` ```
``` ```json
// Changes the parameters // Changes the parameters
// of the second order // of the second order
PUT /v1/orders/{id}/parameters PUT /v1/orders/{id}/parameters
@ -173,7 +173,7 @@ PUT /v1/orders/{id}/parameters
{ "delivery_address" } { "delivery_address" }
``` ```
``` ```json
// Partially changes the order // Partially changes the order
// by rewriting the parameters // by rewriting the parameters
// of the second beverage // of the second beverage
@ -187,7 +187,7 @@ PUT /v1/orders/{id}/items/{item_id}
{ "recipe", "volume", "milk_type" } { "recipe", "volume", "milk_type" }
``` ```
``` ```json
// Deletes one of the beverages // Deletes one of the beverages
DELETE /v1/orders/{id}/items/{item_id} DELETE /v1/orders/{id}/items/{item_id}
``` ```
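To show how a consumer benefits from this decomposition, here is a sketch of client code performing the same partial update through the sub-endpoints. The `fetch`-based helper, the revision handling via `If-Match`/`ETag`, and the concrete item identifier are assumptions made for the example, not part of the interface defined above.

```typescript
// A sketch of a client working with the decomposed sub-endpoints.
// Every call is idempotent and can be retried independently.
async function callApi(
  method: 'PUT' | 'DELETE',
  url: string,
  revision: string,
  body?: unknown
): Promise<string> {
  const response = await fetch(url, {
    method,
    headers: {
      'If-Match': revision,
      'Content-Type': 'application/json'
    },
    body: body === undefined ? undefined : JSON.stringify(body)
  });
  if (!response.ok) {
    throw new Error(`Unexpected status: ${response.status}`);
  }
  // Assumption: the server returns the new revision in `ETag`
  return response.headers.get('ETag') ?? revision;
}

async function editOrder(orderId: string, revision: string) {
  // Reset the delivery address
  revision = await callApi(
    'PUT',
    `/v1/orders/${orderId}/parameters`,
    revision,
    { delivery_address: '…' }
  );
  // Remove the second beverage
  return callApi(
    'DELETE',
    `/v1/orders/${orderId}/items/2`,
    revision
  );
}
```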
@ -210,7 +210,7 @@ To make true collaborative editing possible, a specifically designed format for
In our case, we might take this direction: In our case, we might take this direction:
``` ```json
POST /v1/order/changes POST /v1/order/changes
X-Idempotency-Token: <token> X-Idempotency-Token: <token>
{ {


@ -25,7 +25,7 @@ One cannot make a partial commitment. Either you guarantee that the code will al
The third principle is much less obvious. Pay close attention to the code that you're suggesting developers write: are there any conventions that you consider self-evident but never wrote down? The third principle is much less obvious. Pay close attention to the code that you're suggesting developers write: are there any conventions that you consider self-evident but never wrote down?
**Example \#1**. Let's take a look at this order processing SDK example: **Example \#1**. Let's take a look at this order processing SDK example:
``` ```typescript
// Creates an order // Creates an order
let order = api.createOrder(); let order = api.createOrder();
// Returns the order status // Returns the order status
@ -36,7 +36,7 @@ Let's imagine that you're struggling with scaling your service, and at some poin
You may say something like, “But we've never promised strict consistency in the first place” — and that is obviously not true. You may say that if, and only if, you have really described the eventual consistency in the `createOrder` docs, and all your SDK examples look like this: You may say something like, “But we've never promised strict consistency in the first place” — and that is obviously not true. You may say that if, and only if, you have really described the eventual consistency in the `createOrder` docs, and all your SDK examples look like this:
``` ```typescript
let order = api.createOrder(); let order = api.createOrder();
let status; let status;
while (true) { while (true) {
@ -60,7 +60,7 @@ If you failed to describe the eventual consistency in the first place, then you
**Example \#2**. Take a look at the following code: **Example \#2**. Take a look at the following code:
``` ```typescript
let resolve; let resolve;
let promise = new Promise( let promise = new Promise(
function (innerResolve) { function (innerResolve) {
@ -76,7 +76,7 @@ Of course, the developers of the language standard can afford such tricks; but y
**Example \#3**. Imagine you're providing an animations API, which includes two independent functions: **Example \#3**. Imagine you're providing an animations API, which includes two independent functions:
``` ```typescript
// Animates object's width, // Animates object's width,
// beginning with the first value, // beginning with the first value,
// ending with the second // ending with the second
@ -98,7 +98,7 @@ In this example, you should document the concrete contract (how often the observ
**Example \#4**. Imagine that customer orders are passing through a specific pipeline: **Example \#4**. Imagine that customer orders are passing through a specific pipeline:
``` ```json
GET /v1/orders/{id}/events/history GET /v1/orders/{id}/events/history
{ "event_history": [ { "event_history": [


@ -14,7 +14,7 @@ Let us take the next logical step and suppose that partners will wish to dynamic
For example, we might provide a second API family (the partner-bound one) with the following methods: For example, we might provide a second API family (the partner-bound one) with the following methods:
``` ```json
// 1. Register a new API type // 1. Register a new API type
PUT /v1/api-types/{api_type} PUT /v1/api-types/{api_type}
{ {
@ -24,7 +24,7 @@ PUT /v1/api-types/{api_type}
} }
``` ```
``` ```json
// 2. Provide a list of coffee machines // 2. Provide a list of coffee machines
// with their API types // with their API types
PUT /v1/partners/{partnerId}/coffee-machines PUT /v1/partners/{partnerId}/coffee-machines
@ -61,7 +61,7 @@ The universal approach to making such amendments is to consider the existing int
More specifically, if we talk about changing available order options, we should do the following: More specifically, if we talk about changing available order options, we should do the following:
1. Describe the current state. All coffee machines, plugged via the API, must support three options: sprinkling with cinnamon, changing the volume, and contactless delivery. 1. Describe the current state. All coffee machines, plugged via the API, must support three options: sprinkling with cinnamon, changing the volume, and contactless delivery.
2. Add a new “with options” endpoint: 2. Add a new “with options” endpoint:
``` ```json
PUT /v1/partners/{partner_id}⮠ PUT /v1/partners/{partner_id}⮠
/coffee-machines-with-options /coffee-machines-with-options
{ {


@ -4,7 +4,7 @@ To demonstrate the problems of strong coupling, let's move on to *interesting* t
So, let's add one more endpoint for registering the partner's own recipe: So, let's add one more endpoint for registering the partner's own recipe:
``` ```json
// Adds new recipe // Adds new recipe
POST /v1/recipes POST /v1/recipes
{ {
@ -24,7 +24,7 @@ At first glance, this appears to be a reasonably simple interface, explicitly de
The first problem is obvious to those who thoroughly read the “[Describing Final Interfaces](#api-design-describing-interfaces)” chapter: product properties must be localized. This leads us to the first change: The first problem is obvious to those who thoroughly read the “[Describing Final Interfaces](#api-design-describing-interfaces)” chapter: product properties must be localized. This leads us to the first change:
``` ```json
"product_properties": { "product_properties": {
// "l10n" is the standard abbreviation // "l10n" is the standard abbreviation
// for "localization" // for "localization"
@ -57,7 +57,7 @@ To exacerbate matters, let us state that the inverse principle is also true: hig
We have already identified a localization context. There is a set of languages and regions supported by our API, and there are requirements for what partners must provide to make the API work in a new region. Specifically, there must be a formatting function to represent beverage volume somewhere in our API code, either internally or within an SDK: We have already identified a localization context. There is a set of languages and regions supported by our API, and there are requirements for what partners must provide to make the API work in a new region. Specifically, there must be a formatting function to represent beverage volume somewhere in our API code, either internally or within an SDK:
``` ```typescript
l10n.volume.format = function( l10n.volume.format = function(
value, language_code, country_code value, language_code, country_code
) { … } ) { … }
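// A purely illustrative sketch (not part of the interface above):
// one possible partner-side implementation of this extension point.
// The locale check, the conversion factor, and the unit labels are
// invented for the example.
l10n.volume.format = function (
  value, language_code, country_code
) {
  if (country_code === 'US') {
    // Convert milliliters to fluid ounces for the US region
    return `${(value / 29.5735).toFixed(1)} fl oz`;
  }
  // Fall back to metric units
  return `${value} ml`;
};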
@ -73,7 +73,7 @@ l10n.volume.format = function(
To ensure our API works correctly with a new language or region, the partner must either define this function or indicate which pre-existing implementation to use through the partner API, like this: To ensure our API works correctly with a new language or region, the partner must either define this function or indicate which pre-existing implementation to use through the partner API, like this:
``` ```json
// Add a general formatting rule // Add a general formatting rule
// for the Russian language // for the Russian language
PUT /formatters/volume/ru PUT /formatters/volume/ru
@ -101,7 +101,7 @@ so the aforementioned `l10n.volume.format` function implementation can retrieve
Let's address the `name` and `description` problem. To reduce the coupling level, we need to formalize (probably just for ourselves) a “layout” concept. We request the provision of the `name` and `description` fields not because we theoretically need them but to present them in a specific user interface. This particular UI might have an identifier or a semantic name associated with it: Let's address the `name` and `description` problem. To reduce the coupling level, we need to formalize (probably just for ourselves) a “layout” concept. We request the provision of the `name` and `description` fields not because we theoretically need them but to present them in a specific user interface. This particular UI might have an identifier or a semantic name associated with it:
``` ```json
GET /v1/layouts/{layout_id} GET /v1/layouts/{layout_id}
{ {
"id", "id",
@ -136,7 +136,7 @@ GET /v1/layouts/{layout_id}
Thus, the partner can decide which option better suits their needs. They can provide mandatory fields for the standard layout: Thus, the partner can decide which option better suits their needs. They can provide mandatory fields for the standard layout:
``` ```json
PUT /v1/recipes/{id}/properties/l10n/{lang} PUT /v1/recipes/{id}/properties/l10n/{lang}
{ {
"search_title", "search_description" "search_title", "search_description"
@ -147,7 +147,7 @@ Alternatively, they can create their own layout and provide the data fields it r
Ultimately, our interface would look like this: Ultimately, our interface would look like this:
``` ```json
POST /v1/recipes POST /v1/recipes
{ "id" } { "id" }
@ -156,7 +156,7 @@ POST /v1/recipes
This conclusion might seem highly counter-intuitive, but the absence of fields in a `Recipe` simply tells us that this entity possesses no specific semantics of its own. It serves solely as an identifier of a context, a way to indicate where to find the data needed by other entities. In the real world, we should implement a builder endpoint capable of creating all the related contexts with a single request: This conclusion might seem highly counter-intuitive, but the absence of fields in a `Recipe` simply tells us that this entity possesses no specific semantics of its own. It serves solely as an identifier of a context, a way to indicate where to find the data needed by other entities. In the real world, we should implement a builder endpoint capable of creating all the related contexts with a single request:
``` ```json
POST /v1/recipe-builder POST /v1/recipe-builder
{ {
"id", "id",
@ -191,7 +191,7 @@ POST /v1/recipe-builder
We should also note that providing a newly created entity identifier from the requesting side is not the best practice. However, since we decided from the very beginning to keep recipe identifiers semantically meaningful, we have to live with this convention. Obviously, there is a risk of encountering collisions with recipe names used by different partners. Therefore, we actually need to modify this operation: either a partner must always use a pair of identifiers (e.g., the recipe id plus the partner's own id), or we need to introduce composite identifiers, as we recommended earlier in the “[Describing Final Interfaces](#api-design-describing-interfaces)” chapter. We should also note that providing a newly created entity identifier from the requesting side is not the best practice. However, since we decided from the very beginning to keep recipe identifiers semantically meaningful, we have to live with this convention. Obviously, there is a risk of encountering collisions with recipe names used by different partners. Therefore, we actually need to modify this operation: either a partner must always use a pair of identifiers (e.g., the recipe id plus the partner's own id), or we need to introduce composite identifiers, as we recommended earlier in the “[Describing Final Interfaces](#api-design-describing-interfaces)” chapter.
``` ```json
POST /v1/recipes/custom POST /v1/recipes/custom
{ {
// The first part of the composite // The first part of the composite
@ -212,7 +212,7 @@ Also note that this format allows us to maintain an important extensibility poin
**NB**: A mindful reader might have noticed that this technique was already used in our API study much earlier in the “[Separating Abstraction Levels](#api-design-separating-abstractions)” chapter regarding the “program” and “program run” entities. Indeed, we can propose an interface for retrieving commands to execute a specific recipe without the `program-matcher` endpoint, and instead, do it this way: **NB**: A mindful reader might have noticed that this technique was already used in our API study much earlier in the “[Separating Abstraction Levels](#api-design-separating-abstractions)” chapter regarding the “program” and “program run” entities. Indeed, we can propose an interface for retrieving commands to execute a specific recipe without the `program-matcher` endpoint, and instead, do it this way:
``` ```json
GET /v1/recipes/{id}/run-data/{api_type} GET /v1/recipes/{id}/run-data/{api_type}
{ /* A description of how to { /* A description of how to


@ -2,7 +2,7 @@
In the previous chapter, we demonstrated how breaking strong coupling of components leads to decomposing entities and collapsing their public interfaces down to a reasonable minimum. But let us return to the question we previously mentioned in the “[Extending through Abstracting](#back-compat-abstracting-extending)” chapter: how should we parametrize the order preparation process implemented via a third-party API? In other words, what *is* the `order_execution_endpoint` required in the API type registration handler? In the previous chapter, we demonstrated how breaking strong coupling of components leads to decomposing entities and collapsing their public interfaces down to a reasonable minimum. But let us return to the question we previously mentioned in the “[Extending through Abstracting](#back-compat-abstracting-extending)” chapter: how should we parametrize the order preparation process implemented via a third-party API? In other words, what *is* the `order_execution_endpoint` required in the API type registration handler?
``` ```json
PUT /v1/api-types/{api_type} PUT /v1/api-types/{api_type}
{ {
@ -14,7 +14,7 @@ PUT /v1/api-types/{api_type}
From general considerations, we may assume that every such API would be capable of executing three functions: running a program with specified parameters, returning the current execution status, and finishing (canceling) the order. An obvious way to provide the common interface is to require these three functions to be executed via a remote call, let's say, like this: From general considerations, we may assume that every such API would be capable of executing three functions: running a program with specified parameters, returning the current execution status, and finishing (canceling) the order. An obvious way to provide the common interface is to require these three functions to be executed via a remote call, let's say, like this:
``` ```json
PUT /v1/api-types/{api_type} PUT /v1/api-types/{api_type}
{ {
@ -67,7 +67,7 @@ In our case we need to implement the following mechanisms:
There are different techniques to organize this data flow (see the [corresponding chapter](#api-patterns-push-vs-poll) of the “API Patterns” Section of this book). Basically, we always have two contexts and a two-way data pipe in between. If we were developing an SDK, we would express the idea with emitting and listening events, like this: There are different techniques to organize this data flow (see the [corresponding chapter](#api-patterns-push-vs-poll) of the “API Patterns” Section of this book). Basically, we always have two contexts and a two-way data pipe in between. If we were developing an SDK, we would express the idea with emitting and listening events, like this:
``` ```typescript
/* Partner's implementation of the program /* Partner's implementation of the program
run procedure for a custom API type */ run procedure for a custom API type */
registerProgramRunHandler( registerProgramRunHandler(
@ -125,7 +125,7 @@ One more important feature of weak coupling is that it allows an entity to have
It becomes obvious from what was said above that two-way weak coupling means a significant increase in code complexity on both levels, which is often redundant. In many cases, two-way event linking might be replaced with one-way linking without significant loss of design quality. That means allowing a low-level entity to call higher-level methods directly instead of generating events. Let's alter our example: It becomes obvious from what was said above that two-way weak coupling means a significant increase in code complexity on both levels, which is often redundant. In many cases, two-way event linking might be replaced with one-way linking without significant loss of design quality. That means allowing a low-level entity to call higher-level methods directly instead of generating events. Let's alter our example:
``` ```typescript
/* Partner's implementation of the program /* Partner's implementation of the program
run procedure for a custom API type */ run procedure for a custom API type */
registerProgramRunHandler( registerProgramRunHandler(
@ -172,7 +172,7 @@ In conclusion, as higher-level APIs are evolving more slowly and much more consi
**NB**: Many contemporary frameworks explore a shared state approach, Redux being probably the most notable example. In the Redux paradigm, the code above would look like this: **NB**: Many contemporary frameworks explore a shared state approach, Redux being probably the most notable example. In the Redux paradigm, the code above would look like this:
``` ```typescript
program.context.on( program.context.on(
'takeout_requested', 'takeout_requested',
() => { () => {
@ -189,7 +189,7 @@ program.context.on(
Let us note that this approach *in general* doesn't contradict the weak coupling principle but violates another one — abstraction levels isolation — and therefore isn't very well suited for writing branchy APIs with high hierarchy trees. In such systems, it's still possible to use a global or quasi-global state manager, but you need to implement event or method call propagation through the hierarchy, i.e., ensure that a low-level entity always interacts with its closest higher-level neighbors only, delegating the responsibility of calling high-level or global methods to them. Let us note that this approach *in general* doesn't contradict the weak coupling principle but violates another one — abstraction levels isolation — and therefore isn't very well suited for writing branchy APIs with high hierarchy trees. In such systems, it's still possible to use a global or quasi-global state manager, but you need to implement event or method call propagation through the hierarchy, i.e., ensure that a low-level entity always interacts with its closest higher-level neighbors only, delegating the responsibility of calling high-level or global methods to them.
``` ```typescript
program.context.on( program.context.on(
'takeout_requested', 'takeout_requested',
() => { () => {
@ -203,7 +203,7 @@ program.context.on(
); );
``` ```
``` ```typescript
// program.context.dispatch implementation // program.context.dispatch implementation
ProgramContext.dispatch = (action) => { ProgramContext.dispatch = (action) => {
// program.context calls its own // program.context calls its own


@ -2,7 +2,7 @@
After reviewing the previous chapter, the reader may wonder why this dichotomy exists in the first place, i.e., why do some HTTP APIs rely on HTTP semantics, while others reject it in favor of custom arrangements, and still others are stuck somewhere in between? For example, if we consider the JSON-RPC response format,[ref JSON-RPC 2.0 Specification. Response object](https://www.jsonrpc.org/specification#response_object) we quickly notice that it could be replaced with standard HTTP protocol functionality. Instead of this: After reviewing the previous chapter, the reader may wonder why this dichotomy exists in the first place, i.e., why do some HTTP APIs rely on HTTP semantics, while others reject it in favor of custom arrangements, and still others are stuck somewhere in between? For example, if we consider the JSON-RPC response format,[ref JSON-RPC 2.0 Specification. Response object](https://www.jsonrpc.org/specification#response_object) we quickly notice that it could be replaced with standard HTTP protocol functionality. Instead of this:
``` ```json
HTTP/1.1 200 OK HTTP/1.1 200 OK
{ {


@ -6,7 +6,7 @@ To describe the semantics and formats, we will refer to the brand-new RFC 9110[r
An HTTP request consists of (1) applying a specific verb to a URL, stating (2) the protocol version, (3) additional meta-information in headers, and (4) optionally, some content (request body): An HTTP request consists of (1) applying a specific verb to a URL, stating (2) the protocol version, (3) additional meta-information in headers, and (4) optionally, some content (request body):
``` ```json
POST /v1/orders HTTP/1.1 POST /v1/orders HTTP/1.1
Host: our-api-host.tld Host: our-api-host.tld
Content-Type: application/json Content-Type: application/json
@ -23,7 +23,7 @@ Content-Type: application/json
An HTTP response to such a request includes (1) the protocol version, (2) a status code with a corresponding message, (3) response headers, and (4) optionally, response content (body): An HTTP response to such a request includes (1) the protocol version, (2) a status code with a corresponding message, (3) response headers, and (4) optionally, response content (body):
``` ```json
HTTP/1.1 201 Created HTTP/1.1 201 Created
Location: /v1/orders/123 Location: /v1/orders/123
Content-Type: application/json Content-Type: application/json
@ -137,12 +137,12 @@ One parameter might be placed in different components of an HTTP request. For ex
* A path, e.g., `/v1/{partner_id}/orders` * A path, e.g., `/v1/{partner_id}/orders`
* A query parameter, e.g. `/v1/orders?partner_id=<partner_id>` * A query parameter, e.g. `/v1/orders?partner_id=<partner_id>`
* A header value, e.g. * A header value, e.g.
``` ```json
GET /v1/orders HTTP/1.1 GET /v1/orders HTTP/1.1
X-ApiName-Partner-Id: <partner_id> X-ApiName-Partner-Id: <partner_id>
``` ```
* A field within the request body, e.g. * A field within the request body, e.g.
``` ```json
POST /v1/orders/retrieve HTTP/1.1 POST /v1/orders/retrieve HTTP/1.1
{ {


@ -20,7 +20,7 @@ We need to apply these principles to an HTTP-based interface, adhering to the le
Let's talk about organizing HTTP APIs based on a specific example. Imagine an application start procedure: as a rule of thumb, the application requests the current user profile and important information regarding them (in our case, ongoing orders), using the authorization token saved in the device's memory. We can propose a straightforward endpoint for this purpose: Let's talk about organizing HTTP APIs based on a specific example. Imagine an application start procedure: as a rule of thumb, the application requests the current user profile and important information regarding them (in our case, ongoing orders), using the authorization token saved in the device's memory. We can propose a straightforward endpoint for this purpose:
``` ```json
GET /v1/state HTTP/1.1 GET /v1/state HTTP/1.1
Authorization: Bearer <token> Authorization: Bearer <token>
@ -52,11 +52,11 @@ This implies that a request traverses the following path:
It is quite obvious that in this setup, we put excessive load on the authorization service as every nested microservice now needs to query it. Even if we abolish checking the authenticity of internal requests, it won't help as services B and C can't know the identifier of the user. Naturally, this leads to the idea of propagating the once-retrieved `user_id` through the microservice mesh: It is quite obvious that in this setup, we put excessive load on the authorization service as every nested microservice now needs to query it. Even if we abolish checking the authenticity of internal requests, it won't help as services B and C can't know the identifier of the user. Naturally, this leads to the idea of propagating the once-retrieved `user_id` through the microservice mesh:
* Gateway D receives a request and exchanges the token for `user_id` through service A * Gateway D receives a request and exchanges the token for `user_id` through service A
* Gateway D queries service B: * Gateway D queries service B:
``` ```json
GET /v1/profiles/{user_id} GET /v1/profiles/{user_id}
``` ```
and service C: and service C:
``` ```json
GET /v1/orders?user_id=<user id> GET /v1/orders?user_id=<user id>
``` ```
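A minimal sketch of how gateway D could implement this propagation is given below. The internal hostnames, the token-exchange endpoint of service A, and the `getJson` helper are assumptions introduced for the example only.

```typescript
// A sketch of gateway D: exchange the token once, then fan out
// the requests with an explicit `user_id`.
async function getJson<T>(
  url: string,
  init?: RequestInit
): Promise<T> {
  const response = await fetch(url, init);
  if (!response.ok) {
    throw new Error(`Unexpected status: ${response.status}`);
  }
  return (await response.json()) as T;
}

async function handleGetState(token: string) {
  // Service A: exchange the token for the user identifier
  // (the endpoint path is hypothetical)
  const { user_id } = await getJson<{ user_id: string }>(
    'https://service-a.internal/v1/auth/check',
    { headers: { Authorization: `Bearer ${token}` } }
  );
  // Services B and C are queried in parallel,
  // each receiving the identifier explicitly
  const [profile, orders] = await Promise.all([
    getJson(`https://service-b.internal/v1/profiles/${user_id}`),
    getJson(
      `https://service-c.internal/v1/orders?user_id=${user_id}`
    )
  ]);
  return { profile, orders };
}
```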
@ -85,7 +85,7 @@ Alternatively, we can rely on HTTP caching which is most likely already implemen
Now let's shift our attention to service C. The results retrieved from it might also be cached. However, the state of an ongoing order changes more frequently than the user's profiles, and returning an invalid state might entail objectionable consequences. However, as discussed in the “[Synchronization Strategies](#api-patterns-sync-strategies)” chapter, we need optimistic concurrency control (i.e., the resource revision) to ensure the functionality works correctly, and nothing could prevent us from using this revision as a cache key. Let service C return a tag describing the current state of the user's orders: Now let's shift our attention to service C. The results retrieved from it might also be cached. However, the state of an ongoing order changes more frequently than the user's profiles, and returning an invalid state might entail objectionable consequences. However, as discussed in the “[Synchronization Strategies](#api-patterns-sync-strategies)” chapter, we need optimistic concurrency control (i.e., the resource revision) to ensure the functionality works correctly, and nothing could prevent us from using this revision as a cache key. Let service C return a tag describing the current state of the user's orders:
``` ```json
GET /v1/orders?user_id=<user_id> HTTP/1.1 GET /v1/orders?user_id=<user_id> HTTP/1.1
HTTP/1.1 200 OK HTTP/1.1 200 OK
@ -98,7 +98,7 @@ Then gateway D can be implemented following this scenario:
2. Upon receiving a subsequent request: 2. Upon receiving a subsequent request:
* Fetch the cached state, if any * Fetch the cached state, if any
* Query service C passing the following parameters: * Query service C passing the following parameters:
``` ```json
GET /v1/orders?user_id=<user_id> HTTP/1.1 GET /v1/orders?user_id=<user_id> HTTP/1.1
If-None-Match: <revision> If-None-Match: <revision>
``` ```
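A sketch of how this scenario might look in gateway D's code follows; the in-memory `Map` (standing in for a real shared cache) and the details of response handling are assumptions of the example.

```typescript
// A sketch of revision-based caching of service C responses.
// Assumption: service C honors `If-None-Match` and responds
// with 304 when the orders' state has not changed.
type CachedOrders = { revision: string; payload: unknown };
const ordersCache = new Map<string, CachedOrders>();

async function getOrders(userId: string): Promise<unknown> {
  const cached = ordersCache.get(userId);
  const response = await fetch(
    `https://service-c.internal/v1/orders?user_id=${userId}`,
    {
      headers: cached
        ? { 'If-None-Match': cached.revision }
        : {}
    }
  );
  // 304 Not Modified: the cached state is still valid
  if (response.status === 304 && cached !== undefined) {
    return cached.payload;
  }
  const payload = await response.json();
  const revision = response.headers.get('ETag');
  if (revision !== null) {
    ordersCache.set(userId, { revision, payload });
  }
  return payload;
}
```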
@ -109,21 +109,21 @@ Then gateway D can be implemented following this scenario:
By employing this approach [using `ETag`s to control caching], we automatically get another pleasant bonus. We can reuse the same data in the order creation endpoint design. In the optimistic concurrency control paradigm, the client must pass the actual revision of the `orders` resource to change its state: By employing this approach [using `ETag`s to control caching], we automatically get another pleasant bonus. We can reuse the same data in the order creation endpoint design. In the optimistic concurrency control paradigm, the client must pass the actual revision of the `orders` resource to change its state:
``` ```json
POST /v1/orders HTTP/1.1 POST /v1/orders HTTP/1.1
If-Match: <revision> If-Match: <revision>
``` ```
Gateway D will add the user's identifier to the request and query service C: Gateway D will add the user's identifier to the request and query service C:
``` ```json
POST /v1/orders?user_id=<user_id> HTTP/1.1 POST /v1/orders?user_id=<user_id> HTTP/1.1
If-Match: <revision> If-Match: <revision>
``` ```
If the revision is valid and the operation is executed, service C might return the updated list of orders alongside the new revision: If the revision is valid and the operation is executed, service C might return the updated list of orders alongside the new revision:
``` ```json
HTTP/1.1 201 Created HTTP/1.1 201 Created
Content-Location: /v1/orders?user_id=<user_id> Content-Location: /v1/orders?user_id=<user_id>
ETag: <new revision> ETag: <new revision>
@ -153,12 +153,12 @@ Let us reiterate once more that we can achieve exactly the same qualities with R
Let's elaborate a bit on the no-authorizing service solution (or, to be more precise, the solution with the authorizing functionality being implemented as a library or a local daemon inside services B, C, and D) with all the data embedded in the authorization token itself. In this scenario, every service performs the following actions: Let's elaborate a bit on the no-authorizing service solution (or, to be more precise, the solution with the authorizing functionality being implemented as a library or a local daemon inside services B, C, and D) with all the data embedded in the authorization token itself. In this scenario, every service performs the following actions:
1. Receives a request like this: 1. Receives a request like this:
``` ```json
GET /v1/profiles/{user_id} GET /v1/profiles/{user_id}
Authorization: Bearer <token> Authorization: Bearer <token>
``` ```
2. Deciphers the token and retrieves a payload. For example, in the following format: 2. Deciphers the token and retrieves a payload. For example, in the following format:
``` ```json
{ {
// The identifier of a user // The identifier of a user
// who owns the token // who owns the token
@ -171,7 +171,7 @@ Let's elaborate a bit on the no-authorizing service solution (or, to be more pre
The necessity to compare two `user_id`s might appear illogical and redundant. However, this opinion is invalid; it originates from the widespread (anti)pattern we started the chapter with, namely the stateful determining of operation parameters: The necessity to compare two `user_id`s might appear illogical and redundant. However, this opinion is invalid; it originates from the widespread (anti)pattern we started the chapter with, namely the stateful determining of operation parameters:
``` ```json
GET /v1/profile GET /v1/profile
Authorization: Bearer <token> Authorization: Bearer <token>
``` ```
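To make the contrast explicit, a sketch of the stateless check every service performs is shown below. The token is assumed to be already verified and deciphered by some library; the payload type is simplified to the single field relevant here.

```typescript
// A sketch of the check behind `GET /v1/profiles/{user_id}`
// with a stateless token. Deciphering and signature validation
// are assumed to be done elsewhere.
type TokenPayload = {
  user_id: string;
  // other payload fields are omitted for brevity
};

function checkProfileAccess(
  requestedUserId: string,
  payload: TokenPayload
): boolean {
  // The explicit `user_id` from the URL must match the one
  // embedded in the token; otherwise the request is rejected
  return payload.user_id === requestedUserId;
}
```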
@ -185,7 +185,7 @@ The problem with this approach is that *splitting* these three operations is not
In the case of the “triple-stacked” access checking endpoint, our only option is implementing a new endpoint with a new interface. With stateless tokens, we might do the following: In the case of the “triple-stacked” access checking endpoint, our only option is implementing a new endpoint with a new interface. With stateless tokens, we might do the following:
1. Include in the token *a list* of the users that the token allows access to: 1. Include in the token *a list* of the users that the token allows access to:
``` ```json
{ {
// The list of identifiers // The list of identifiers
// of user profiles accessible // of user profiles accessible


@ -16,7 +16,7 @@ This convention allows for reflecting almost any API's entity nomenclature decen
* A path parameter: `/v1/orders/{id}` * A path parameter: `/v1/orders/{id}`
* A query parameter: `/orders/{id}?version=1` * A query parameter: `/orders/{id}?version=1`
* A header: * A header:
``` ```json
GET /orders/{id} HTTP/1.1 GET /orders/{id} HTTP/1.1
X-OurCoffeeAPI-Version: 1 X-OurCoffeeAPI-Version: 1
``` ```
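A framework-agnostic sketch of resolving the version from these three locations is shown below; the precedence order and the request object shape are assumptions of the example, not a recommendation.

```typescript
// A sketch of extracting the API version from the three
// possible locations listed above.
type IncomingRequest = {
  path: string; // e.g., "/v1/orders/123"
  query: Record<string, string>; // e.g., { version: "1" }
  headers: Record<string, string>; // lower-cased header names
};

function resolveApiVersion(
  request: IncomingRequest
): string | null {
  // 1. A path parameter: /v1/orders/{id}
  const pathMatch = request.path.match(/^\/v(\d+)\//);
  if (pathMatch !== null) {
    return pathMatch[1];
  }
  // 2. A query parameter: /orders/{id}?version=1
  if (request.query.version !== undefined) {
    return request.query.version;
  }
  // 3. A header: X-OurCoffeeAPI-Version: 1
  return request.headers['x-ourcoffeeapi-version'] ?? null;
}
```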
@ -83,7 +83,7 @@ The CRUD/HTTP correspondence might appear convenient as every resource is forced
Let's start with the resource creation operation. As we remember from the “[Synchronization Strategies](#api-patterns-sync-strategies)” chapter, in any important subject area, creating entities must be an idempotent procedure that ideally allows for controlling concurrency. In the HTTP API paradigm, idempotent creation could be implemented using one of the following three approaches: Let's start with the resource creation operation. As we remember from the “[Synchronization Strategies](#api-patterns-sync-strategies)” chapter, in any important subject area, creating entities must be an idempotent procedure that ideally allows for controlling concurrency. In the HTTP API paradigm, idempotent creation could be implemented using one of the following three approaches:
1. Through the `POST` method with passing an idempotency token (in which capacity the resource `ETag` might be employed): 1. Through the `POST` method with passing an idempotency token (in which capacity the resource `ETag` might be employed):
``` ```json
POST /v1/orders/?user_id=<user_id> HTTP/1.1 POST /v1/orders/?user_id=<user_id> HTTP/1.1
If-Match: <revision> If-Match: <revision>
@ -91,7 +91,7 @@ Let's start with the resource creation operation. As we remember from the “[Sy
``` ```
2. Through the `PUT` method, implying that the entity identifier is generated by the client. Revision still could be used for controlling concurrency; however, the idempotency token is the URL itself: 2. Through the `PUT` method, implying that the entity identifier is generated by the client. Revision still could be used for controlling concurrency; however, the idempotency token is the URL itself:
``` ```json
PUT /v1/orders/{order_id} HTTP/1.1 PUT /v1/orders/{order_id} HTTP/1.1
If-Match: <revision> If-Match: <revision>
@ -100,7 +100,7 @@ Let's start with the resource creation operation. As we remember from the “[Sy
3. By creating a draft with the `POST` method and then committing it with the `PUT` method: 3. By creating a draft with the `POST` method and then committing it with the `PUT` method:
``` ```json
POST /v1/drafts HTTP/1.1 POST /v1/drafts HTTP/1.1
{ … } { … }
@ -108,7 +108,7 @@ Let's start with the resource creation operation. As we remember from the “[Sy
HTTP/1.1 201 Created HTTP/1.1 201 Created
Location: /v1/drafts/{id} Location: /v1/drafts/{id}
``` ```
``` ```json
PUT /v1/drafts/{id}/commit PUT /v1/drafts/{id}/commit
If-Match: <revision> If-Match: <revision>


@ -4,7 +4,7 @@ The examples of organizing HTTP APIs discussed in the previous chapters were mos
Imagine that some actor (a client or a gateway) tries to create a new order: Imagine that some actor (a client or a gateway) tries to create a new order:
``` ```json
POST /v1/orders?user_id=<user_id> HTTP/1.1 POST /v1/orders?user_id=<user_id> HTTP/1.1
Authorization: Bearer <token> Authorization: Bearer <token>
If-Match: <revision> If-Match: <revision>
@ -70,7 +70,7 @@ Additionally, there is a third dimension to this problem in the form of webserve
All these observations naturally lead us to the following conclusion: if we want to use errors for diagnostics and (possibly) for helping clients recover, we need to include in the error body machine-readable metadata about the error subtype and, possibly, additional properties with a detailed description of the error. For example, as we proposed in the “[Describing Final Interfaces](#api-design-describing-interfaces)” chapter:
-```
+```json
POST /v1/coffee-machines/search HTTP/1.1
{
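To make this concrete, a machine-readable error payload of the kind described above might be shaped roughly as follows. This is a sketch only: the field names (`reason`, `localized_message`, `details`) echo the general approach, but the exact schema is an assumption rather than a quotation from the diff.

```typescript
// Sketch of a machine-readable error body. `reason` is a stable,
// parseable error subtype; the human-readable texts are auxiliary.
interface ApiError {
  // Machine-readable error subtype
  reason: string;
  // Text that may be shown to the end user as is
  localized_message: string;
  // Additional diagnostic properties for the client developer
  details?: {
    field?: string;
    [extra: string]: unknown;
  };
}

// Example: the search request above refers to an unknown recipe
const wrongParameterError: ApiError = {
  reason: 'wrong_parameter_value',
  localized_message:
    'Something went wrong. Please contact the application developer.',
  details: { field: 'recipes' }
};
```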
@@ -125,7 +125,7 @@ Let us also remind the reader that the client must treat unknown `4xx` status co
However, for internal systems, this argumentation is wrong. To build proper monitoring and notification systems, server errors must contain machine-readable error subtypes, just like the client errors. The same approaches are applicable (using arbitrary status codes and/or passing the error kind in a header); however, this data must be stripped off by a gateway that marks the border between external and internal systems and replaced with general instructions for both developers and end users, describing actions that need to be performed upon receiving an error.
-```
+```json
POST /v1/orders/?user_id=<user id> HTTP/1.1
If-Match: <revision>
@@ -143,7 +143,7 @@ X-OurCoffeAPI-Error-Kind: db_timeout
  * which host returned an error
  */ }
```
-```
+```json
// The response as returned to
// the client. The details regarding
// the server error are removed
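A possible shape of the gateway-side translation described above, sketched in TypeScript. The `X-OurCoffeAPI-Error-Kind` header and the `db_timeout` kind come from the excerpt; the function and field names are illustrative assumptions, not the book's API.

```typescript
// Sketch: the gateway keeps machine-readable details of an internal
// error for monitoring but returns only a generic instruction to the
// external client.
interface InternalErrorInfo {
  status: number;     // status returned by the internal service
  errorKind: string;  // e.g., 'db_timeout' from X-OurCoffeAPI-Error-Kind
  host: string;       // which host returned the error
}

function toExternalResponse(err: InternalErrorInfo): {
  status: number;
  body: Record<string, unknown>;
} {
  // 1. The machine-readable subtype feeds monitoring and alerting
  console.error(
    `[gateway] ${err.host} failed with '${err.errorKind}' (${err.status})`
  );
  // 2. The details are stripped before crossing the system border
  return {
    status: 500,
    body: {
      reason: 'internal_error',
      localized_message:
        'Cannot place the order. Please try again later.'
    }
  };
}
```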


@@ -23,7 +23,7 @@ However, there are also non-trivial problems we face while developing an SDK for
1. In client-server APIs, data is passed by value. To refer to some entities, specially designed identifiers need to be used. For example, if we have two sets of entities — recipes and offers — we need to build a map to understand which recipe corresponds to which offer:
-```
+```typescript
// Request 'lungo' and 'latte' recipes
const recipes = await api
  .getRecipes(['lungo', 'latte']);
@@ -55,7 +55,7 @@ However, there are also non-trivial problems we face while developing an SDK for
This piece of code would be half as long if we received offers from the `api.search` SDK method with a *reference* to a recipe:
-```
+```typescript
// Request 'lungo' and 'latte' recipes
const recipes = await api
  .getRecipes(['lungo', 'latte']);
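As an illustration of the point made in item 1, the sketch below shows the mapping work that falls on the developer when entities are linked only by identifiers. The `api.getRecipes` and `api.search` method names follow the excerpts; the response shapes and parameter names are assumptions.

```typescript
// Assumed response shapes, for the sake of the example
interface Recipe { id: string; name: string; }
interface Offer  { id: string; recipeId: string; price: number; }

// Without references: the association between offers and recipes
// has to be rebuilt by hand from identifiers.
async function offersByRecipeId(api: {
  getRecipes(ids: string[]): Promise<Recipe[]>;
  search(query: { recipeIds: string[] }): Promise<Offer[]>;
}) {
  // Request 'lungo' and 'latte' recipes, then offers for them
  const recipes = await api.getRecipes(['lungo', 'latte']);
  const offers = await api.search({
    recipeIds: recipes.map((r) => r.id)
  });
  // Manually rebuild the association: recipe id -> its offers
  const offersByRecipe = new Map<string, Offer[]>();
  for (const offer of offers) {
    const list = offersByRecipe.get(offer.recipeId) ?? [];
    list.push(offer);
    offersByRecipe.set(offer.recipeId, list);
  }
  return offersByRecipe;
}
```

If `api.search` returned offers that already carry a `recipe` reference, the whole mapping block would disappear, which is exactly the point the chapter makes next.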
@@ -78,7 +78,7 @@ However, there are also non-trivial problems we face while developing an SDK for
2. Client-server APIs are typically decomposed so that one response contains data regarding one kind of entity. Even if the endpoint is composite (i.e., allows for combining data from different sources depending on parameters), it is still the developer's responsibility to use these parameters. The code sample from the previous example would be even shorter if the SDK allowed for the initialization of all related entities:
-```
+```typescript
// Request offers for latte and lungo
// in the vicinity
const offers = await api.search({
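A sketch of what such an SDK call might look like once all related entities are initialized in one go. The `api.search` name is taken from the excerpt; the query parameters and entity shapes are assumptions.

```typescript
// Assumed shapes of fully initialized, related entities
interface Recipe { id: string; name: string; }
interface Place  { id: string; location: [number, number]; }
interface Offer  {
  id: string;
  price: number;
  recipe: Recipe; // already resolved, no identifier juggling
  place: Place;   // likewise
}

interface SearchSdk {
  search(query: {
    recipes: string[];
    location: [number, number];
  }): Promise<Offer[]>;
}

// Usage: one call returns offers together with the recipe and
// place objects they refer to.
async function findNearbyOffers(api: SearchSdk) {
  const offers = await api.search({
    recipes: ['lungo', 'latte'],
    location: [59.94, 30.31]
  });
  return offers.map((o) => `${o.recipe.name} at ${o.price}`);
}
```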
@@ -101,7 +101,7 @@ However, there are also non-trivial problems we face while developing an SDK for
3. Receiving callbacks in client-server APIs, even over a duplex communication channel, is rather inconvenient to work with and requires object mapping as well. Even if a push model is implemented, the resulting client code will be rather bulky:
-```
+```typescript
// Retrieve ongoing orders
const orders = await api
  .getOngoingOrders();
@@ -134,7 +134,7 @@ However, there are also non-trivial problems we face while developing an SDK for
Once again, we face a situation where an SDK lacking important features leads to mistakes in applications that use it. It would be much more convenient for a developer if an order object allowed for subscribing to its status updates without the need to learn how it works at the transport level and how to avoid missing an event.
-```
+```typescript
const order = await api
  .createOrder(…)
// No need to subscribe to
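The kind of SDK-level subscription the paragraph argues for might look like the sketch below. `api.createOrder` appears in the excerpt; the event name, the `on` method, and the `Order` shape are assumptions about a hypothetical SDK.

```typescript
// Hypothetical SDK surface: the order object hides the transport
// (polling, SSE, WebSocket, etc.) and simply emits status changes.
type OrderStatus = 'created' | 'preparing' | 'ready' | 'cancelled';

interface Order {
  id: string;
  status: OrderStatus;
  // Returns an unsubscribe function
  on(
    event: 'statusChange',
    listener: (status: OrderStatus) => void
  ): () => void;
}

interface OrderSdk {
  createOrder(params: { recipeId: string; placeId: string }): Promise<Order>;
}

async function watchOrder(api: OrderSdk) {
  const order = await api.createOrder({
    recipeId: 'lungo',
    placeId: 'place-123'
  });
  // No need to subscribe to transport-level events or to poll:
  // the SDK guarantees the listener will not miss a transition.
  const unsubscribe = order.on('statusChange', (status) => {
    if (status === 'ready' || status === 'cancelled') {
      unsubscribe();
    }
  });
}
```

The design choice here is that the "missed event" problem is solved once, inside the SDK, instead of in every application that uses it.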
@@ -150,7 +150,7 @@ However, there are also non-trivial problems we face while developing an SDK for
4. Recovering from business logic-bound errors is typically a complex procedure. As it can hardly be described in a machine-readable manner, client developers have to work out the recovery scenarios on their own.
-```
+```typescript
// Request offers
const offers = await api
  .search(…);
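A sketch of the kind of recovery scenario a client developer ends up writing by hand. The `api.search` and `api.createOrder` names follow the excerpts; the `offer_expired` error code, the error class, and the retry logic are illustrative assumptions.

```typescript
// Hypothetical business-logic error: the offer expired between the
// search and the order creation.
class ApiBusinessError extends Error {
  constructor(public readonly reason: string) {
    super(reason);
  }
}

interface Offer { id: string; }
interface Api {
  search(query: { recipe: string }): Promise<Offer[]>;
  createOrder(params: { offerId: string }): Promise<{ id: string }>;
}

async function createOrderWithRetry(api: Api): Promise<{ id: string }> {
  let offers = await api.search({ recipe: 'lungo' });
  try {
    return await api.createOrder({ offerId: offers[0].id });
  } catch (e) {
    // The recovery scenario is hand-written: only this specific error
    // subtype is retriable, and only by repeating the search.
    if (e instanceof ApiBusinessError && e.reason === 'offer_expired') {
      offers = await api.search({ recipe: 'lungo' });
      return await api.createOrder({ offerId: offers[0].id });
    }
    throw e;
  }
}
```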


@@ -57,7 +57,7 @@ It is very easy to demonstrate how coupling several subject areas in one entity
But it is not the end of the story. If the developer still wants exactly this, i.e., to show a coffee shop chain icon (if any) on the order creation button, then what should they do? Following the same logic, we should provide an even more specialized option for doing so. For example, we can adopt the following rule: if the data contains a `createOrderButtonIconUrl` property, the icon will be taken from this field. Developers could then customize the order creation button by overwriting the `createOrderButtonIconUrl` field for every search result:
-```
+```typescript
const searchBox = new SearchBox({
  // For simplicity, let's allow
  // overriding the search function
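The override the paragraph describes might look roughly like this. `SearchBox` and `createOrderButtonIconUrl` come from the excerpt; the constructor options, the search-result shape, and the stand-in implementations are assumptions made so the sketch is self-contained.

```typescript
interface SearchResult {
  title: string;
  chainIconUrl?: string;
  // Specialized extension point: if present, the order creation
  // button takes its icon from this field
  createOrderButtonIconUrl?: string;
}

interface SearchBoxOptions {
  // The widget lets the developer wrap or replace the search function
  search: (query: string) => Promise<SearchResult[]>;
}

// Minimal stand-ins so the example compiles and runs on its own
class SearchBox {
  constructor(private readonly options: SearchBoxOptions) {}
  find(query: string): Promise<SearchResult[]> {
    return this.options.search(query);
  }
}

async function defaultSearch(query: string): Promise<SearchResult[]> {
  return [
    {
      title: `Coffee shop serving "${query}"`,
      chainIconUrl: 'https://example.com/chain-icon.png'
    }
  ];
}

// Overriding the search function so that the chain icon (if any)
// is copied into the field the order creation button reads from
const searchBox = new SearchBox({
  search: async (query) => {
    const results = await defaultSearch(query);
    return results.map((result) => ({
      ...result,
      createOrderButtonIconUrl: result.chainIconUrl
    }));
  }
});
```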

(Binary files changed; contents not shown.)


@@ -1,10 +1,10 @@
-const { readFileSync } = require('fs');
-const { resolve } = require('path');
+import { readFileSync } from 'fs';
+import { resolve } from 'path';
const escapeHtml = (str) =>
    str.replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/&/g, '&amp;');
-module.exports = {
+export const templates = {
    pageBreak: '<div class="page-break"></div>',
    mainContent: (content) => `<section class="main-content">