chore: prepare for initial release.

This commit is contained in:
James Harton 2024-08-12 20:31:40 +12:00
commit 1fb108838e
Signed by: james
GPG key ID: 90E82DAA13F624F4
48 changed files with 3275 additions and 0 deletions

397
.drone.yml Normal file
View file

@ -0,0 +1,397 @@
kind: pipeline
type: docker
name: build
steps:
- name: restore ASDF cache
image: meltwater/drone-cache
pull: "always"
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: restore build cache
image: meltwater/drone-cache
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
settings:
restore: true
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: install dependencies
image: harton.dev/james/asdf_container:latest
pull: "always"
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
depends_on:
- restore ASDF cache
- restore build cache
commands:
- asdf_install
- rm -rf .asdf/downloads
- . $ASDF_DIR/asdf.sh
- mix local.hex --if-missing --force
- mix local.rebar --if-missing --force
- mix deps.get
- mix deps.compile
- mix dialyzer --plt
- name: store ASDF cache
image: meltwater/drone-cache
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
depends_on:
- install dependencies
settings:
rebuild: true
override: false
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'asdf-{{ os }}-{{ arch }}-{{ checksum ".tool-versions" }}'
mount:
- .asdf
- name: store build cache
image: meltwater/drone-cache
environment:
AWS_ACCESS_KEY_ID:
from_secret: ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY:
from_secret: SECRET_ACCESS_KEY
AWS_PLUGIN_PATH_STYLE: true
depends_on:
- install dependencies
settings:
rebuild: true
override: false
endpoint:
from_secret: S3_ENDPOINT
bucket:
from_secret: CACHE_BUCKET
region: us-east-1
path-style: true
cache_key: 'elixir-{{ checksum "mix.lock" }}-{{ checksum ".tool-versions" }}'
mount:
- deps
- _build
- .hex
- .mix
- .rebar3
- name: mix compile
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- install dependencies
commands:
- asdf mix compile --warnings-as-errors
- name: mix test
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix test
- name: mix credo
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix credo --strict
- name: mix hex.audit
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix hex.audit
- name: mix format
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix format --check-formatted
- name: mix spark.formatter
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix spark.formatter --check
- name: mix spark.cheat_sheets
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix spark.cheat_sheets --check
- name: mix deps.unlock
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix deps.unlock --check-unused
- name: mix doctor
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- asdf mix doctor --full
- name: mix git_ops.check_message
image: harton.dev/james/asdf_container:latest
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
depends_on:
- mix compile
commands:
- git log -1 --format=%s > .last_commit_message
- asdf mix git_ops.check_message .last_commit_message
- name: mix git_ops.release
image: harton.dev/james/asdf_container:latest
when:
branch:
- main
event:
exclude:
- pull_request
depends_on:
- mix test
- mix credo
- mix hex.audit
- mix format
- mix spark.formatter
- mix spark.cheat_sheets
- mix deps.unlock
- mix doctor
- mix git_ops.check_message
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
DRONE_TOKEN:
from_secret: DRONE_TOKEN
commands:
- git fetch --tags
- . $ASDF_DIR/asdf.sh
- mix git_ops.project_info --format=shell > before.env
- mix git_ops.release --yes --no-major || true
- mix git_ops.project_info --format=shell > after.env
- . ./before.env
- export OLD_APP_VERSION=$${APP_VERSION}
- . ./after.env
- export NEW_APP_VERSION=$${APP_VERSION}
- if [ "v$${OLD_APP_VERSION}" != "v$${NEW_APP_VERSION}" ]; then
- export GIT_URL=$(echo $DRONE_GIT_HTTP_URL | sed -e "s/:\\/\\//:\\/\\/$DRONE_REPO_OWNER:$DRONE_TOKEN@/")
- git push $${GIT_URL} "HEAD:${DRONE_COMMIT_REF}" "refs/tags/v$${NEW_APP_VERSION}"
- fi
- name: build artifacts
image: harton.dev/james/asdf_container:latest
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- mix test
- mix credo
- mix hex.audit
- mix format
- mix spark.formatter
- mix spark.cheat_sheets
- mix deps.unlock
- mix doctor
- mix git_ops.check_message
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
commands:
- . $ASDF_DIR/asdf.sh
- mix git_ops.project_info --format=shell > app.env
- . ./app.env
- mkdir artifacts
- mix hex.build -o "artifacts/$${APP_NAME}-$${APP_VERSION}-pkg.tar"
- gzip "artifacts/$${APP_NAME}-$${APP_VERSION}-pkg.tar"
- mix docs
- tar zcvf "artifacts/$${APP_NAME}-$${APP_VERSION}-docs.tar.gz" doc/
- git tag -l --format='%(contents:subject)' v$${APP_VERSION} > tag_subject
- git tag -l --format='%(contents:body)' v$${APP_VERSION} > tag_body
- name: gitea release
image: plugins/gitea-release
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- build artifacts
settings:
api_key:
from_secret: DRONE_TOKEN
base_url: https://harton.dev
files: artifacts/*.tar.gz
checksum: sha256
title: tag_subject
note: tag_body
- name: docs release
when:
event:
- tag
refs:
include:
- refs/tags/v*
image: minio/mc
environment:
S3_ENDPOINT:
from_secret: S3_ENDPOINT
ACCESS_KEY:
from_secret: ACCESS_KEY_ID
SECRET_KEY:
from_secret: SECRET_ACCESS_KEY
depends_on:
- build artifacts
commands:
- mc alias set store $${S3_ENDPOINT} $${ACCESS_KEY} $${SECRET_KEY}
- mc mb -p store/docs.harton.nz
- mc mirror --overwrite doc/ store/docs.harton.nz/$${DRONE_REPO}/$${DRONE_TAG}
- mc mirror --overwrite doc/ store/docs.harton.nz/$${DRONE_REPO}
- name: hex release
image: harton.dev/james/asdf_container:latest
when:
event:
- tag
refs:
include:
- refs/tags/v*
depends_on:
- build artifacts
environment:
MIX_ENV: test
HEX_HOME: /drone/src/.hex
MIX_HOME: /drone/src/.mix
REBAR_BASE_DIR: /drone/src/.rebar3
ASDF_DATA_DIR: /drone/src/.asdf
ASDF_DIR: /root/.asdf
HEX_API_KEY:
from_secret: HEX_API_KEY
commands:
- . $ASDF_DIR/asdf.sh
- mix hex.publish --yes

74
.formatter.exs Normal file
View file

@ -0,0 +1,74 @@
# Used by "mix format"
spark_locals_without_parens = [
adapter: 1,
auth: 1,
aws_sigv4: 1,
base_url: 1,
body: 1,
cache: 1,
cache_dir: 1,
compress_body: 1,
connect_options: 1,
decode_body: 1,
decode_json: 1,
finch: 1,
finch_private: 1,
finch_request: 1,
form: 1,
form_multipart: 1,
headers: 1,
http_errors: 1,
inet6: 1,
into: 1,
json: 1,
max_redirects: 1,
max_retries: 1,
method: 1,
params: 1,
path_params: 1,
path_params_style: 1,
plug: 1,
pool_timeout: 1,
raw: 1,
receive_timeout: 1,
redirect: 1,
redirect_trusted: 1,
req_delete: 1,
req_delete: 2,
req_get: 1,
req_get: 2,
req_head: 1,
req_head: 2,
req_merge: 1,
req_merge: 2,
req_new: 1,
req_new: 2,
req_patch: 1,
req_patch: 2,
req_post: 1,
req_post: 2,
req_put: 1,
req_put: 2,
req_request: 1,
req_request: 2,
req_run: 1,
req_run: 2,
request: 1,
retry: 1,
retry_delay: 1,
retry_log_level: 1,
unix_socket: 1,
url: 1,
wait_for: 1,
wait_for: 2
]
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
import_deps: [:plug, :reactor],
plugins: [Spark.Formatter],
locals_without_parens: spark_locals_without_parens,
export: [
locals_without_parens: spark_locals_without_parens
]
]

26
.gitignore vendored Normal file
View file

@ -0,0 +1,26 @@
# The directory Mix will write compiled artifacts to.
/_build/
# If you run "mix test --cover", coverage assets end up here.
/cover/
# The directory Mix downloads your dependencies sources to.
/deps/
# Where third-party dependencies like ExDoc output generated docs.
/doc/
# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch
# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump
# Also ignore archive artifacts (built via "mix archive.build").
*.ez
# Ignore package tarball (built via "mix hex.build").
reactor_req-*.tar
# Temporary files, for example, from tests.
/tmp/

151
LICENSE.md Normal file
View file

@ -0,0 +1,151 @@
Copyright 2022 James Harton ("Licensor")
**HIPPOCRATIC LICENSE**
**Version 3.0, October 2021**
<https://firstdonoharm.dev/version/3/0/full.md>
**TERMS AND CONDITIONS**
TERMS AND CONDITIONS FOR USE, COPY, MODIFICATION, PREPARATION OF DERIVATIVE WORK, REPRODUCTION, AND DISTRIBUTION:
**[1.](#1) DEFINITIONS:**
_This section defines certain terms used throughout this license agreement._
[1.1.](#1.1) “License” means the terms and conditions, as stated herein, for use, copy, modification, preparation of derivative work, reproduction, and distribution of Software (as defined below).
[1.2.](#1.2) “Licensor” means the copyright and/or patent owner or entity authorized by the copyright and/or patent owner that is granting the License.
[1.3.](#1.3) “Licensee” means the individual or entity exercising permissions granted by this License, including the use, copy, modification, preparation of derivative work, reproduction, and distribution of Software (as defined below).
[1.4.](#1.4) “Software” means any copyrighted work, including but not limited to software code, authored by Licensor and made available under this License.
[1.5.](#1.5) “Supply Chain” means the sequence of processes involved in the production and/or distribution of a commodity, good, or service offered by the Licensee.
[1.6.](#1.6) “Supply Chain Impacted Party” or “Supply Chain Impacted Parties” means any person(s) directly impacted by any of Licensee's Supply Chain, including the practices of all persons or entities within the Supply Chain prior to a good or service reaching the Licensee.
[1.7.](#1.7) “Duty of Care” is defined by its use in tort law, delict law, and/or similar bodies of law closely related to tort and/or delict law, including without limitation, a requirement to act with the watchfulness, attention, caution, and prudence that a reasonable person in the same or similar circumstances would use towards any Supply Chain Impacted Party.
[1.8.](#1.8) “Worker” is defined to include any and all permanent, temporary, and agency workers, as well as piece-rate, salaried, hourly paid, legal young (minors), part-time, night, and migrant workers.
**[2.](#2) INTELLECTUAL PROPERTY GRANTS:**
_This section identifies intellectual property rights granted to a Licensee_.
[2.1.](#2.1) _Grant of Copyright License_: Subject to the terms and conditions of this License, Licensor hereby grants to Licensee a worldwide, non-exclusive, no-charge, royalty-free copyright license to use, copy, modify, prepare derivative work, reproduce, or distribute the Software, Licensor authored modified software, or other work derived from the Software.
[2.2.](#2.2) _Grant of Patent License_: Subject to the terms and conditions of this License, Licensor hereby grants Licensee a worldwide, non-exclusive, no-charge, royalty-free patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer Software.
**[3.](#3) ETHICAL STANDARDS:**
_This section lists conditions the Licensee must comply with in order to have rights under this License._
The rights granted to the Licensee by this License are expressly made subject to the Licensee's ongoing compliance with the following conditions:
* [3.1.](#3.1) The Licensee SHALL NOT, whether directly or indirectly, through agents or assigns:
* [3.1.1.](#3.1.1) Infringe upon any person's right to life or security of person, engage in extrajudicial killings, or commit murder, without lawful cause (See Article 3, _United Nations Universal Declaration of Human Rights_; Article 6, _International Covenant on Civil and Political Rights_)
* [3.1.2.](#3.1.2) Hold any person in slavery, servitude, or forced labor (See Article 4, _United Nations Universal Declaration of Human Rights_; Article 8, _International Covenant on Civil and Political Rights_);
* [3.1.3.](#3.1.3) Contribute to the institution of slavery, slave trading, forced labor, or unlawful child labor (See Article 4, _United Nations Universal Declaration of Human Rights_; Article 8, _International Covenant on Civil and Political Rights_);
* [3.1.4.](#3.1.4) Torture or subject any person to cruel, inhumane, or degrading treatment or punishment (See Article 5, _United Nations Universal Declaration of Human Rights_; Article 7, _International Covenant on Civil and Political Rights_);
* [3.1.5.](#3.1.5) Discriminate on the basis of sex, gender, sexual orientation, race, ethnicity, nationality, religion, caste, age, medical disability or impairment, and/or any other like circumstances (See Article 7, _United Nations Universal Declaration of Human Rights_; Article 2, _International Covenant on Economic, Social and Cultural Rights_; Article 26, _International Covenant on Civil and Political Rights_);
* [3.1.6.](#3.1.6) Prevent any person from exercising his/her/their right to seek an effective remedy by a competent court or national tribunal (including domestic judicial systems, international courts, arbitration bodies, and other adjudicating bodies) for actions violating the fundamental rights granted to him/her/them by applicable constitutions, applicable laws, or by this License (See Article 8, _United Nations Universal Declaration of Human Rights_; Articles 9 and 14, _International Covenant on Civil and Political Rights_);
* [3.1.7.](#3.1.7) Subject any person to arbitrary arrest, detention, or exile (See Article 9, _United Nations Universal Declaration of Human Rights_; Article 9, _International Covenant on Civil and Political Rights_);
* [3.1.8.](#3.1.8) Subject any person to arbitrary interference with a person's privacy, family, home, or correspondence without the express written consent of the person (See Article 12, _United Nations Universal Declaration of Human Rights_; Article 17, _International Covenant on Civil and Political Rights_);
* [3.1.9.](#3.1.9) Arbitrarily deprive any person of his/her/their property (See Article 17, _United Nations Universal Declaration of Human Rights_);
* [3.1.10.](#3.1.10) Forcibly remove indigenous peoples from their lands or territories or take any action with the aim or effect of dispossessing indigenous peoples from their lands, territories, or resources, including without limitation the intellectual property or traditional knowledge of indigenous peoples, without the free, prior, and informed consent of indigenous peoples concerned (See Articles 8 and 10, _United Nations Declaration on the Rights of Indigenous Peoples_);
* [3.1.11.](#3.1.11) _Fossil Fuel Divestment_: Be an individual or entity, or a representative, agent, affiliate, successor, attorney, or assign of an individual or entity, on the [FFI Solutions Carbon Underground 200 list](https://www.ffisolutions.com/research-analytics-index-solutions/research-screening/the-carbon-underground-200/?cn-reloaded=1);
* [3.1.12.](#3.1.12) _Ecocide_: Commit ecocide:
* [3.1.12.1.](#3.1.12.1) For the purpose of this section, “ecocide” means unlawful or wanton acts committed with knowledge that there is a substantial likelihood of severe and either widespread or long-term damage to the environment being caused by those acts;
* [3.1.12.2.](#3.1.12.2) For the purpose of further defining ecocide and the terms contained in the previous paragraph:
* [3.1.12.2.1.](#3.1.12.2.1) “Wanton” means with reckless disregard for damage which would be clearly excessive in relation to the social and economic benefits anticipated;
* [3.1.12.2.2.](#3.1.12.2.2) “Severe” means damage which involves very serious adverse changes, disruption, or harm to any element of the environment, including grave impacts on human life or natural, cultural, or economic resources;
* [3.1.12.2.3.](#3.1.12.2.3) “Widespread” means damage which extends beyond a limited geographic area, crosses state boundaries, or is suffered by an entire ecosystem or species or a large number of human beings;
* [3.1.12.2.4.](#3.1.12.2.4) “Long-term” means damage which is irreversible or which cannot be redressed through natural recovery within a reasonable period of time; and
* [3.1.12.2.5.](#3.1.12.2.5) “Environment” means the earth, its biosphere, cryosphere, lithosphere, hydrosphere, and atmosphere, as well as outer space
(See Section II, _Independent Expert Panel for the Legal Definition of Ecocide_, Stop Ecocide Foundation and the Promise Institute for Human Rights at UCLA School of Law, June 2021);
* [3.1.13.](#3.1.13) _Extractive Industries_: Be an individual or entity, or a representative, agent, affiliate, successor, attorney, or assign of an individual or entity, that engages in fossil fuel or mineral exploration, extraction, development, or sale;
* [3.1.14.](#3.1.14) _Boycott / Divestment / Sanctions_: Be an individual or entity, or a representative, agent, affiliate, successor, attorney, or assign of an individual or entity, identified by the Boycott, Divestment, Sanctions (“BDS”) movement on its website (<https://bdsmovement.net/> and <https://bdsmovement.net/get-involved/what-to-boycott>) as a target for boycott;
* [3.1.15.](#3.1.15) _Taliban_: Be an individual or entity that:
* [3.1.15.1.](#3.1.15.1) engages in any commercial transactions with the Taliban; or
* [3.1.15.2.](#3.1.15.2) is a representative, agent, affiliate, successor, attorney, or assign of the Taliban;
* [3.1.16.](#3.1.16) _Myanmar_: Be an individual or entity that:
* [3.1.16.1.](#3.1.16.1) engages in any commercial transactions with the Myanmar/Burmese military junta; or
* [3.1.16.2.](#3.1.16.2) is a representative, agent, affiliate, successor, attorney, or assign of the Myanmar/Burmese government;
* [3.1.17.](#3.1.17) _Xinjiang Uygur Autonomous Region_: Be an individual or entity, or a representative, agent, affiliate, successor, attorney, or assign of any individual or entity, that does business in, purchases goods from, or otherwise benefits from goods produced in the Xinjiang Uygur Autonomous Region of China;
* [3.1.18.](#3.1.18) _US Tariff Act_: Be an individual or entity:
* [3.1.18.1.](#3.1.18.1) which U.S. Customs and Border Protection (CBP) has currently issued a Withhold Release Order (WRO) or finding against based on reasonable suspicion of forced labor; or
* [3.1.18.2.](#3.1.18.2) that is a representative, agent, affiliate, successor, attorney, or assign of an individual or entity that does business with an individual or entity which currently has a WRO or finding from CBP issued against it based on reasonable suspicion of forced labor;
* [3.1.19.](#3.1.19) _Mass Surveillance_: Be a government agency or multinational corporation, or a representative, agent, affiliate, successor, attorney, or assign of a government or multinational corporation, which participates in mass surveillance programs;
* [3.1.20.](#3.1.20) _Military Activities_: Be an entity or a representative, agent, affiliate, successor, attorney, or assign of an entity which conducts military activities;
* [3.1.21.](#3.1.21) _Law Enforcement_: Be an individual or entity, or a representative, agent, affiliate, successor, attorney, or assign of an individual or entity, that provides good or services to, or otherwise enters into any commercial contracts with, any local, state, or federal law enforcement agency;
* [3.1.22.](#3.1.22) _Media_: Be an individual or entity, or a representative, agent, affiliate, successor, attorney, or assign of an individual or entity, that broadcasts messages promoting killing, torture, or other forms of extreme violence;
* [3.1.23.](#3.1.23) Interfere with Workers' free exercise of the right to organize and associate (See Article 20, United Nations Universal Declaration of Human Rights; C087 - Freedom of Association and Protection of the Right to Organise Convention, 1948 (No. 87), International Labour Organization; Article 8, International Covenant on Economic, Social and Cultural Rights); and
* [3.1.24.](#3.1.24) Harm the environment in a manner inconsistent with local, state, national, or international law.
* [3.2.](#3.2) The Licensee SHALL:
* [3.2.1.](#3.2.1) _Social Auditing_: Only use social auditing mechanisms that adhere to Worker-Driven Social Responsibility Networks Statement of Principles (<https://wsr-network.org/what-is-wsr/statement-of-principles/>) over traditional social auditing mechanisms, to the extent the Licensee uses any social auditing mechanisms at all;
* [3.2.2.](#3.2.2) _Workers on Board of Directors_: Ensure that if the Licensee has a Board of Directors, 30% of Licensee's board seats are held by Workers paid no more than 200% of the compensation of the lowest paid Worker of the Licensee;
* [3.2.3.](#3.2.3) _Supply Chain_: Provide clear, accessible supply chain data to the public in accordance with the following conditions:
* [3.2.3.1.](#3.2.3.1) All data will be on Licensee's website and/or, to the extent Licensee is a representative, agent, affiliate, successor, attorney, subsidiary, or assign, on Licensee's principal's or parent's website or some other online platform accessible to the public via an internet search on a common internet search engine; and
* [3.2.3.2.](#3.2.3.2) Data published will include, where applicable, manufacturers, top tier suppliers, subcontractors, cooperatives, component parts producers, and farms;
* [3.2.4.](#3.2.4) Provide equal pay for equal work where the performance of such work requires equal skill, effort, and responsibility, and which are performed under similar working conditions, except where such payment is made pursuant to:
* [3.2.4.1.](#3.2.4.1) A seniority system;
* [3.2.4.2.](#3.2.4.2) A merit system;
* [3.2.4.3.](#3.2.4.3) A system which measures earnings by quantity or quality of production; or
* [3.2.4.4.](#3.2.4.4) A differential based on any other factor other than sex, gender, sexual orientation, race, ethnicity, nationality, religion, caste, age, medical disability or impairment, and/or any other like circumstances (See 29 U.S.C.A. § 206(d)(1); Article 23, _United Nations Universal Declaration of Human Rights_; Article 7, _International Covenant on Economic, Social and Cultural Rights_; Article 26, _International Covenant on Civil and Political Rights_); and
* [3.2.5.](#3.2.5) Allow for reasonable limitation of working hours and periodic holidays with pay (See Article 24, _United Nations Universal Declaration of Human Rights_; Article 7, _International Covenant on Economic, Social and Cultural Rights_).
**[4.](#4) SUPPLY CHAIN IMPACTED PARTIES:**
_This section identifies additional individuals or entities that a Licensee could harm as a result of violating the Ethical Standards section, the condition that the Licensee must voluntarily accept a Duty of Care for those individuals or entities, and the right to a private right of action that those individuals or entities possess as a result of violations of the Ethical Standards section._
[4.1.](#4.1) In addition to the above Ethical Standards, Licensee voluntarily accepts a Duty of Care for Supply Chain Impacted Parties of this License, including individuals and communities impacted by violations of the Ethical Standards. The Duty of Care is breached when a provision within the Ethical Standards section is violated by a Licensee, one of its successors or assigns, or by an individual or entity that exists within the Supply Chain prior to a good or service reaching the Licensee.
[4.2.](#4.2) Breaches of the Duty of Care, as stated within this section, shall create a private right of action, allowing any Supply Chain Impacted Party harmed by the Licensee to take legal action against the Licensee in accordance with applicable negligence laws, whether they be in tort law, delict law, and/or similar bodies of law closely related to tort and/or delict law, regardless if Licensee is directly responsible for the harms suffered by a Supply Chain Impacted Party. Nothing in this section shall be interpreted to include acts committed by individuals outside of the scope of his/her/their employment.
[5.](#5) **NOTICE:** _This section explains when a Licensee must notify others of the License._
[5.1.](#5.1) _Distribution of Notice_: Licensee must ensure that everyone who receives a copy of or uses any part of Software from Licensee, with or without changes, also receives the License and the copyright notice included with Software (and if included by the Licensor, patent, trademark, and attribution notice). Licensee must ensure that License is prominently displayed so that any individual or entity seeking to download, copy, use, or otherwise receive any part of Software from Licensee is notified of this License and its terms and conditions. Licensee must cause any modified versions of the Software to carry prominent notices stating that Licensee changed the Software.
[5.2.](#5.2) _Modified Software_: Licensee is free to create modifications of the Software and distribute only the modified portion created by Licensee, however, any derivative work stemming from the Software or its code must be distributed pursuant to this License, including this Notice provision.
[5.3.](#5.3) _Recipients as Licensees_: Any individual or entity that uses, copies, modifies, reproduces, distributes, or prepares derivative work based upon the Software, all or part of the Software's code, or a derivative work developed by using the Software, including a portion of its code, is a Licensee as defined above and is subject to the terms and conditions of this License.
**[6.](#6) REPRESENTATIONS AND WARRANTIES:**
[6.1.](#6.1) _Disclaimer of Warranty_: TO THE FULL EXTENT ALLOWED BY LAW, THIS SOFTWARE COMES “AS IS,” WITHOUT ANY WARRANTY, EXPRESS OR IMPLIED, AND LICENSOR SHALL NOT BE LIABLE TO ANY PERSON OR ENTITY FOR ANY DAMAGES OR OTHER LIABILITY ARISING FROM, OUT OF, OR IN CONNECTION WITH THE SOFTWARE OR THIS LICENSE, UNDER ANY LEGAL CLAIM.
[6.2.](#6.2) _Limitation of Liability_: LICENSEE SHALL HOLD LICENSOR HARMLESS AGAINST ANY AND ALL CLAIMS, DEBTS, DUES, LIABILITIES, LIENS, CAUSES OF ACTION, DEMANDS, OBLIGATIONS, DISPUTES, DAMAGES, LOSSES, EXPENSES, ATTORNEYS' FEES, COSTS, LIABILITIES, AND ALL OTHER CLAIMS OF EVERY KIND AND NATURE WHATSOEVER, WHETHER KNOWN OR UNKNOWN, ANTICIPATED OR UNANTICIPATED, FORESEEN OR UNFORESEEN, ACCRUED OR UNACCRUED, DISCLOSED OR UNDISCLOSED, ARISING OUT OF OR RELATING TO LICENSEE'S USE OF THE SOFTWARE. NOTHING IN THIS SECTION SHOULD BE INTERPRETED TO REQUIRE LICENSEE TO INDEMNIFY LICENSOR, NOR REQUIRE LICENSOR TO INDEMNIFY LICENSEE.
**[7.](#7) TERMINATION**
[7.1.](#7.1) _Violations of Ethical Standards or Breaching Duty of Care_: If Licensee violates the Ethical Standards section or Licensee, or any other person or entity within the Supply Chain prior to a good or service reaching the Licensee, breaches its Duty of Care to Supply Chain Impacted Parties, Licensee must remedy the violation or harm caused by Licensee within 30 days of being notified of the violation or harm. If Licensee fails to remedy the violation or harm within 30 days, all rights in the Software granted to Licensee by License will be null and void as between Licensor and Licensee.
[7.2.](#7.2) _Failure of Notice_: If any person or entity notifies Licensee in writing that Licensee has not complied with the Notice section of this License, Licensee can keep this License by taking all practical steps to comply within 30 days after the notice of noncompliance. If Licensee does not do so, Licensee's License (and all rights licensed hereunder) will end immediately.
[7.3.](#7.3) _Judicial Findings_: In the event Licensee is found by a civil, criminal, administrative, or other court of competent jurisdiction, or some other adjudicating body with legal authority, to have committed actions which are in violation of the Ethical Standards or Supply Chain Impacted Party sections of this License, all rights granted to Licensee by this License will terminate immediately.
[7.4.](#7.4) _Patent Litigation_: If Licensee institutes patent litigation against any entity (including a cross-claim or counterclaim in a suit) alleging that the Software, all or part of the Software's code, or a derivative work developed using the Software, including a portion of its code, constitutes direct or contributory patent infringement, then any patent license, along with all other rights, granted to Licensee under this License will terminate as of the date such litigation is filed.
[7.5.](#7.5) _Additional Remedies_: Termination of the License by failing to remedy harms in no way prevents Licensor or Supply Chain Impacted Party from seeking appropriate remedies at law or in equity.
**[8.](#8) MISCELLANEOUS:**
[8.1.](#8.1) _Conditions_: Sections 3, 4.1, 5.1, 5.2, 7.1, 7.2, 7.3, and 7.4 are conditions of the rights granted to Licensee in the License.
[8.2.](#8.2) _Equitable Relief_: Licensor and any Supply Chain Impacted Party shall be entitled to equitable relief, including injunctive relief or specific performance of the terms hereof, in addition to any other remedy to which they are entitled at law or in equity.
[8.3.](#8.3) _Copyleft_: Modified software, source code, or other derivative work must be licensed, in its entirety, under the exact same conditions as this License.
[8.4.](#8.4) _Severability_: If any term or provision of this License is determined to be invalid, illegal, or unenforceable by a court of competent jurisdiction, any such determination of invalidity, illegality, or unenforceability shall not affect any other term or provision of this License or invalidate or render unenforceable such term or provision in any other jurisdiction. If the determination of invalidity, illegality, or unenforceability by a court of competent jurisdiction pertains to the terms or provisions contained in the Ethical Standards section of this License, all rights in the Software granted to Licensee shall be deemed null and void as between Licensor and Licensee.
[8.5.](#8.5) _Section Titles_: Section titles are solely written for organizational purposes and should not be used to interpret the language within each section.
[8.6.](#8.6) _Citations_: Citations are solely written to provide context for the source of the provisions in the Ethical Standards.
[8.7.](#8.7) _Section Summaries_: Some sections have a brief _italicized description_ which is provided for the sole purpose of briefly describing the section and should not be used to interpret the terms of the License.
[8.8.](#8.8) _Entire License_: This is the entire License between the Licensor and Licensee with respect to the claims released herein and that the consideration stated herein is the only consideration or compensation to be paid or exchanged between them for this License. This License cannot be modified or amended except in a writing signed by Licensor and Licensee.
[8.9.](#8.9) _Successors and Assigns_: This License shall be binding upon and inure to the benefit of the Licensor's and Licensee's respective heirs, successors, and assigns.

83
README.md Normal file
View file

@ -0,0 +1,83 @@
# Reactor.Req
[![Build Status](https://drone.harton.dev/api/badges/james/reactor_req/status.svg)](https://drone.harton.dev/james/reactor_req)
[![Hex.pm](https://img.shields.io/hexpm/v/reactor_req.svg)](https://hex.pm/packages/reactor_req)
[![Hippocratic License HL3-FULL](https://img.shields.io/static/v1?label=Hippocratic%20License&message=HL3-FULL&labelColor=5e2751&color=bc8c3d)](https://firstdonoharm.dev/version/3/0/full.html)
A [Reactor](https://github.com/ash-project/reactor) extension that provides steps for working with HTTP requests via [Req](https://github.com/wojtekmach/req).
## Example
The following example uses Reactor to retrieve the repository description from the Forgejo API:
```elixir
defmodule GetForgejoRepoDescription do
use Reactor, extensions: [Reactor.Req]
input :hostname
input :owner
input :repo
step :repo_url do
argument :hostname, input(:hostname)
argument :owner, input(:owner)
argument :repo, input(:repo)
run fn args ->
URI.new("https://#{args.hostname}/api/v1/repos/#{args.owner}/#{args.repo}")
end
end
req_get :get_repo do
url result(:repo_url)
headers value([accept: "application/json"])
http_errors value(:raise)
end
step :get_description do
argument :description, result(:get_repo, [:body, "description"])
run fn args -> {:ok, args.description} end
end
end
Reactor.run!(GetForgejoRepoDescription, %{
hostname: "harton.dev",
owner: "james",
repo: "reactor_req"
})
# => "A Reactor DSL extension for making HTTP requests with Req."
```
## Installation
If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `reactor_req` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:reactor_req, "~> 0.1.0"}
]
end
```
Documentation for the latest release is available on [HexDocs](https://hexdocs.pm/reactor_req).
## Github Mirror
This repository is mirrored [on Github](https://github.com/jimsynz/reactor_req)
from its primary location [on my Forgejo instance](https://harton.dev/james/reactor_req).
Feel free to raise issues and open PRs on Github.
## License
This software is licensed under the terms of the
[HL3-FULL](https://firstdonoharm.dev), see the `LICENSE.md` file included with
this package for the terms.
This license actively proscribes this software being used by and for some
industries, countries and activities. If your usage of this software doesn't
comply with the terms of this license, then [contact me](mailto:james@harton.nz)
with the details of your use-case to organise the purchase of a license - the
cost of which may include a donation to a suitable charity or NGO.

13
config/config.exs Normal file
View file

@ -0,0 +1,13 @@
import Config
# Ask the Spark formatter plugin to strip parentheses from DSL calls when
# running `mix format`.
config :spark, formatter: [remove_parens?: true]
# `git_ops` is a dev-only dependency, so it is only configured for the dev env.
if Mix.env() == :dev do
  config :git_ops,
    mix_project: Reactor.Req.MixProject,
    changelog_file: "CHANGELOG.md",
    repository_url: "https://harton.dev/james/reactor_req",
    manage_mix_version?: true,
    manage_readme_version: true,
    version_tag_prefix: "v"
end

File diff suppressed because it is too large Load diff

27
lib/reactor/req.ex Normal file
View file

@ -0,0 +1,27 @@
defmodule Reactor.Req do
  @moduledoc """
  An extension which provides direct support for working with `req` requests
  within Reactor.
  """

  # Each DSL entity module below is patched into the top-level `:reactor`
  # section so the `req_*` steps can be used alongside the built-in ones.
  use Spark.Dsl.Extension,
    dsl_patches:
      for entity_module <- [
            Reactor.Req.Dsl.Delete,
            Reactor.Req.Dsl.Get,
            Reactor.Req.Dsl.Head,
            Reactor.Req.Dsl.Merge,
            Reactor.Req.Dsl.New,
            Reactor.Req.Dsl.Patch,
            Reactor.Req.Dsl.Post,
            Reactor.Req.Dsl.Put,
            Reactor.Req.Dsl.Request,
            Reactor.Req.Dsl.Run
          ] do
        %Spark.Dsl.Patch.AddEntity{
          section_path: [:reactor],
          entity: entity_module.__entity__()
        }
      end
end

View file

@ -0,0 +1,21 @@
defmodule Reactor.Req.Builder do
  @moduledoc "Common builder for all DSL entities"
  import Reactor.Template, only: :macros
  alias Reactor.{Argument, Builder, Req.Step}
  @doc false
  # Converts a DSL entity struct (`req`) into a step added to `reactor`.
  #
  # Every non-nil struct field (other than the bookkeeping keys) becomes a
  # step argument, so each configured DSL option is forwarded to
  # `Reactor.Req.Step`, which in turn passes it to the `Req` function named
  # by `fun`.
  def build(req, fun, reactor) do
    arguments =
      req
      |> Map.from_struct()
      # These keys are entity bookkeeping, not Req options.
      |> Map.drop([:__identifier__, :arguments, :name])
      # Unset options are nil; drop them so Req's own defaults apply.
      |> Enum.reject(&is_nil(elem(&1, 1)))
      |> Enum.map(fn
        # Deliberately single-clause: a non-template value raises
        # FunctionClauseError here, surfacing misconfiguration at build time.
        {name, template} when is_template(template) ->
          %Argument{name: name, source: template}
      end)
    Builder.add_step(reactor, req.name, {Step, fun: fun}, arguments, ref: :step_name)
  end
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Delete do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:delete` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :delete, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Get do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:get` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :get, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Head do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:head` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :head, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Merge do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:merge` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :merge, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.New do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:new` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :new, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Patch do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:patch` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :patch, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Post do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:post` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :post, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Put do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:put` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :put, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Request do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:request` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :request, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,5 @@
defimpl Reactor.Dsl.Build, for: Reactor.Req.Dsl.Run do
  @moduledoc false

  # Hands off to the shared builder, tagging the step with the `:run` verb.
  def build(req, reactor) do
    Reactor.Req.Builder.build(req, :run, reactor)
  end

  # Nothing entity-specific to verify.
  def verify(_req, _reactor), do: :ok
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Delete do
  @moduledoc """
  The `req_delete` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_delete,
      describe: """
      Performs a request using `Req.delete/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Get do
  @moduledoc """
  The `req_get` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_get,
      describe: """
      Performs a request using `Req.get/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Head do
  @moduledoc """
  The `req_head` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_head,
      describe: """
      Performs a request using `Req.head/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Merge do
  @moduledoc """
  The `req_merge` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_merge,
      describe: """
      Creates a new request using `Req.merge/2`
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.New do
  @moduledoc """
  The `req_new` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # `Req.new/1` always builds a fresh request, so the `:request` option is
  # dropped from this entity's fields.
  defstruct Keyword.delete(Options.struct_attrs(), :request)

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_new,
      describe: """
      Creates a new request using `Req.new/1`
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,126 @@
defmodule Reactor.Req.Dsl.Options do
  @moduledoc """
  All the known options of Req (as of this writing).
  """
  alias Reactor.Template

  # Option name => documentation string. This list is the single source of
  # truth for both the generated DSL schema (`merge/1`) and the entity struct
  # fields (`struct_attrs/0`).
  @options [
    adapter: "Adapter to use to make the actual HTTP request",
    auth: "Sets request authentication",
    aws_sigv4: "If set, the AWS options to sign request",
    base_url: "If set, the request URL is prepended with this base URL",
    body: "The request body",
    cache_dir: "The directory to store the cache",
    cache: "If `true`, performs HTTP caching",
    compress_body: "If set to `true`, compresses the request body using gzip",
    connect_options:
      "Dynamically starts (or re-uses already started) Finch pool with the given connection options",
    decode_body: "If set to `false`, disables automatic response body decoding",
    decode_json: "Options to pass to `Jason.decode!/2`",
    finch_private: "A map or keyword list of private metadata to add to the Finch request",
    finch_request:
      "A function that executes the Finch request, defaults to using `Finch.request/3`",
    finch: "The Finch pool to use. Defaults to pool automatically started by `Req`",
    form_multipart: "If set, encodes the request body as `multipart/form-data`",
    form: "If set, encodes the request body as `application/x-www-form-urlencoded`",
    headers: "The request headers as a `{key, value}` enumerable (e.g. map, keyword list)",
    http_errors: "How to manage 4xx and 5xx responses",
    inet6: "If set to `true`, uses IPv6",
    into: "Where to send the response body",
    json: "If set, encodes the request body as JSON",
    max_redirects: "The maximum number of redirects, defaults to `10`",
    max_retries:
      "Maximum number of retry attempts, defaults to `3` (for a total of `4` requests to the server, including the initial one)",
    method: "The request method, defaults to `:get`",
    params: "If set, appends parameters to the request query string (via `put_params` step)",
    path_params_style: "How path params are expressed (via `put_path_params` step)",
    path_params: "If set, uses a templated request path (via `put_path_params` step)",
    plug:
      "If set, calls the given plug instead of making an HTTP request over the network (via `run_plug` step)",
    pool_timeout: "Pool checkout timeout in milliseconds, defaults to `5000`",
    raw:
      "If set to `true`, disables automatic body decompression (`decompress_body` step) and decoding (`decode_body` step)",
    receive_timeout: "Socket receive timeout in milliseconds, defaults to `15_000`",
    redirect_trusted:
      "By default, authorization credentials are only sent on redirects with the same host, scheme and port. If `:redirect_trusted` is set to `true`, credentials will be sent to any host",
    redirect: "If set to `false`, disables automatic response redirects",
    request: "A previously built request",
    retry_delay:
      "If not set, which is the default, the retry delay is determined by the value of retry-delay header on HTTP 429/503 responses. If the header is not set, the default delay follows a simple exponential backoff: 1s, 2s, 4s, 8s, ...",
    retry_log_level:
      "The log level to emit retry logs at. Can also be set to `false` to disable logging these messages. Defaults to `:warning`",
    retry: "One of `:safe_transient` (default), `:transient`, `fun` or `false`",
    unix_socket: "If set, connect through the given UNIX domain socket",
    url: "The request URL"
  ]

  # NOTE: the keys below mirror `@options` exactly (plus the bookkeeping keys
  # added by `struct_attrs/0`). Previously this type drifted from the option
  # list: `path_param_style` was a typo for `path_params_style`, a phantom
  # `compressed` key was listed, `compress_body` was missing its `nil`
  # alternative, and `path_params`/`request` were absent.
  @type entity :: %{
          struct: module,
          __identifier__: any,
          name: atom,
          method: nil | Template.t(),
          url: nil | Template.t(),
          headers: nil | Template.t(),
          http_errors: nil | Template.t(),
          body: nil | Template.t(),
          base_url: nil | Template.t(),
          params: nil | Template.t(),
          path_params: nil | Template.t(),
          path_params_style: nil | Template.t(),
          auth: nil | Template.t(),
          form: nil | Template.t(),
          form_multipart: nil | Template.t(),
          json: nil | Template.t(),
          compress_body: nil | Template.t(),
          aws_sigv4: nil | Template.t(),
          raw: nil | Template.t(),
          decode_body: nil | Template.t(),
          decode_json: nil | Template.t(),
          into: nil | Template.t(),
          redirect: nil | Template.t(),
          redirect_trusted: nil | Template.t(),
          max_redirects: nil | Template.t(),
          retry: nil | Template.t(),
          retry_delay: nil | Template.t(),
          retry_log_level: nil | Template.t(),
          max_retries: nil | Template.t(),
          cache: nil | Template.t(),
          cache_dir: nil | Template.t(),
          adapter: nil | Template.t(),
          plug: nil | Template.t(),
          finch: nil | Template.t(),
          connect_options: nil | Template.t(),
          inet6: nil | Template.t(),
          pool_timeout: nil | Template.t(),
          receive_timeout: nil | Template.t(),
          unix_socket: nil | Template.t(),
          finch_private: nil | Template.t(),
          finch_request: nil | Template.t(),
          request: nil | Template.t()
        }

  @doc """
  Returns the full option schema with `overrides` merged over the generated
  entries.

  Each Req option becomes an optional schema entry accepting a Reactor
  template; entity modules use `overrides` to add their required `:name`
  option (or replace generated entries).
  """
  @spec merge(Keyword.t()) :: Keyword.t()
  def merge(overrides) do
    @options
    |> Enum.map(fn {name, doc} ->
      {name,
       [
         type: {:or, [nil, Template.type()]},
         required: false,
         doc: doc
       ]}
    end)
    |> Keyword.merge(overrides)
  end

  @doc """
  Returns the default struct attributes for a DSL entity: one nil-defaulted
  field per Req option, plus the `:__identifier__`, `:arguments` and `:name`
  bookkeeping fields.
  """
  @spec struct_attrs :: Keyword.t()
  def struct_attrs do
    @options
    |> Keyword.keys()
    |> Enum.map(&{&1, nil})
    |> Keyword.put(:__identifier__, nil)
    |> Keyword.put(:arguments, [])
    |> Keyword.put(:name, nil)
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Patch do
  @moduledoc """
  The `req_patch` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_patch,
      describe: """
      Performs a request using `Req.patch/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Post do
  @moduledoc """
  The `req_post` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_post,
      describe: """
      Performs a request using `Req.post/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Put do
  @moduledoc """
  The `req_put` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_put,
      describe: """
      Performs a request using `Req.put/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Request do
  @moduledoc """
  A `req_request` DSL entity for the `Reactor.Req` DSL extension.
  """
  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options
  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()
  @type t :: Options.entity()
  @doc false
  def __entity__,
    do: %Spark.Dsl.Entity{
      name: :req_request,
      describe: """
      Performs a request using `Req.request/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema:
        Options.merge(
          name: [
            type: :atom,
            required: true,
            doc:
              "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
          ]
        )
    }
end

View file

@ -0,0 +1,37 @@
defmodule Reactor.Req.Dsl.Run do
  @moduledoc """
  The `req_run` DSL entity for the `Reactor.Req` DSL extension.
  """

  alias Reactor.Dsl.WaitFor
  alias Reactor.Req.Dsl.Options

  # One nil-defaulted field per Req option, plus entity bookkeeping fields.
  defstruct Options.struct_attrs()

  @type t :: Options.entity()

  @doc false
  def __entity__ do
    %Spark.Dsl.Entity{
      name: :req_run,
      describe: """
      Performs a request using `Req.run/2`.
      Note that Reactor doesn't validate any options - it simply passes them to the underlying `Req` function and assumes it will validate them.
      """,
      target: __MODULE__,
      identifier: :name,
      imports: [Reactor.Dsl.Argument],
      args: [:name],
      recursive_as: :steps,
      entities: [arguments: [WaitFor.__entity__()]],
      schema: Options.merge(name_schema())
    }
  end

  # Schema for the required `:name` positional argument of the step.
  defp name_schema do
    [
      name: [
        type: :atom,
        required: true,
        doc:
          "A unique name for the step. Used when choosing the return value of the Reactor and for arguments into other steps"
      ]
    ]
  end
end

30
lib/reactor/req/ext.ex Normal file
View file

@ -0,0 +1,30 @@
defmodule Reactor.Req.Ext do
  @moduledoc false
  # Don't try and use this extension. It's just here to work around a problem
  # with spark.formatter and dsl patches.
  use Spark.Dsl.Extension,
    sections: [
      %Spark.Dsl.Section{
        name: :reactor,
        top_level?: true,
        entities:
          for entity_module <- [
                Reactor.Req.Dsl.Delete,
                Reactor.Req.Dsl.Get,
                Reactor.Req.Dsl.Head,
                Reactor.Req.Dsl.Merge,
                Reactor.Req.Dsl.New,
                Reactor.Req.Dsl.Patch,
                Reactor.Req.Dsl.Post,
                Reactor.Req.Dsl.Put,
                Reactor.Req.Dsl.Request,
                Reactor.Req.Dsl.Run
              ] do
            entity_module.__entity__()
          end
      }
    ]
end

101
lib/reactor/req/step.ex Normal file
View file

@ -0,0 +1,101 @@
defmodule Reactor.Req.Step do
  @moduledoc """
  A step which delegates to `req`.

  The step is configured with a `:fun` option naming the `Req` function to
  call; all non-nil step arguments are forwarded to it as `Req` options.
  """

  use Reactor.Step

  # `Req` functions which accept either `(options)` alone or
  # `(request, options)` when a previously-built request is supplied.
  @request_or_options ~w[delete get head patch post put request]a

  @doc false
  @impl true
  @spec run(Reactor.inputs(), Reactor.context(), keyword) :: {:ok | :error, any}
  def run(arguments, _context, options) do
    fun =
      Keyword.fetch!(options, :fun)

    # Drop nil-valued arguments so unset DSL options don't override Req's
    # own defaults. (This also converts the arguments map to a keyword list.)
    arguments =
      arguments
      |> Enum.reject(&is_nil(elem(&1, 1)))

    do_run(arguments, fun)
  end

  # All HTTP verbs plus `Req.request` share the same calling convention:
  # when a `:request` argument was provided it is passed as the first
  # argument, otherwise Req builds a request from the options alone.
  #
  # Previously `:request` had its own clause which always called
  # `Req.request(request, options)` — crashing with a nil request when no
  # `:request` argument was configured. It now falls back to
  # `Req.request(options)` like the other verbs.
  defp do_run(arguments, fun) when fun in @request_or_options do
    {request, options} = Keyword.pop(arguments, :request)

    if request do
      apply(Req, fun, [request, options])
    else
      apply(Req, fun, [options])
    end
  end

  # `Req.merge/2` merges options into an existing request and returns the
  # request itself, so we wrap it in an ok tuple.
  defp do_run(arguments, :merge) do
    {request, options} = Keyword.pop(arguments, :request)
    {:ok, Req.merge(request, options)}
  end

  # `Req.new/1` always builds a fresh request; any `:request` argument is
  # ignored.
  defp do_run(arguments, :new) do
    options = Keyword.delete(arguments, :request)
    {:ok, Req.new(options)}
  end

  # `Req.run/2` returns a `{request, response}` tuple rather than an
  # ok/error tuple, so we wrap it ourselves.
  defp do_run(arguments, :run) do
    {request, options} = Keyword.pop(arguments, :request)
    {:ok, Req.run(request, options)}
  end
end

123
mix.exs Normal file
View file

@ -0,0 +1,123 @@
defmodule Reactor.Req.MixProject do
  @moduledoc """
  A Reactor extension which provides steps for working with `Req`.
  """
  # Package version; managed by `git_ops` (configured in `config/config.exs`).
  @version "0.1.0"
  use Mix.Project
  def project do
    [
      aliases: aliases(),
      app: :reactor_req,
      # Keep protocols unconsolidated in dev so `defimpl`s recompile cleanly.
      consolidate_protocols: Mix.env() != :dev,
      deps: deps(),
      # Reuse the moduledoc as the Hex package description.
      description: @moduledoc,
      dialyzer: [plt_add_apps: [:bandit]],
      docs: docs(),
      elixir: "~> 1.17",
      elixirc_paths: elixirc_paths(Mix.env()),
      package: package(),
      source_url: "https://harton.dev/james/reactor_req",
      homepage_url: "https://harton.dev/james/reactor_req",
      start_permanent: Mix.env() == :prod,
      version: @version
    ]
  end
  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end
  # Hex package metadata (see https://hex.pm/docs/publish).
  defp package do
    [
      name: :reactor_req,
      files: ~w[lib .formatter.exs mix.exs README* LICENSE* CHANGELOG* documentation],
      licenses: ["HL3-FULL"],
      links: %{
        "Source" => "https://harton.dev/james/reactor_req",
        "GitHub" => "https://github.com/jimsynz/reactor_req",
        "Changelog" => "https://harton.dev/james/reactor_req/src/branch/main/CHANGELOG.md",
        "Sponsor" => "https://github.com/sponsors/jimsynz"
      },
      maintainers: [
        "James Harton <james@harton.nz>"
      ],
      source_url: "https://harton.dev/james/reactor_req"
    ]
  end
  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # Dev/test-only tooling.
      {:bandit, "~> 1.5", only: ~w[dev test]a, runtime: false},
      {:credo, "~> 1.0", only: ~w[dev test]a, runtime: false},
      {:dialyxir, "~> 1.0", only: ~w[dev test]a, runtime: false},
      {:doctor, "~> 0.21", only: ~w[dev test]a, runtime: false},
      {:ex_check, "~> 0.16", only: ~w[dev test]a, runtime: false},
      {:ex_doc, "~> 0.34", only: ~w[dev test]a, runtime: false},
      {:git_ops, "~> 2.6", only: ~w[dev test]a, runtime: false},
      {:plug, "~> 1.16", only: ~w[dev test]a, runtime: false},
      # Runtime dependencies.
      {:igniter, "~> 0.2"},
      {:reactor, "~> 0.9"},
      {:req, "~> 0.5"},
      {:spark, "~> 2.0"}
    ]
  end
  defp aliases do
    [
      credo: "credo --strict",
      # Regenerate Spark cheat sheets around the normal docs build.
      docs: [
        "spark.cheat_sheets",
        "docs",
        "spark.cheat_sheets_in_search",
        "spark.replace_doc_links"
      ],
      "spark.formatter": "spark.formatter --extensions Reactor.Req.Ext",
      "spark.cheat_sheets": "spark.cheat_sheets --extensions Reactor.Req",
      "spark.cheat_sheets_in_search": "spark.cheat_sheets_in_search --extensions Reactor.Req"
    ]
  end
  # ExDoc configuration, including the Spark DSL extension metadata.
  defp docs do
    [
      extras: extra_documentation(),
      extra_section: "GUIDES",
      formatters: ["html"],
      filter_modules: ~r/^Elixir\.Reactor/,
      groups_for_extras: extra_documentation_groups(),
      main: "readme",
      source_url_pattern: "https://harton.dev/james/reactor_req/src/branch/main/%{path}#L%{line}",
      spark: [
        extension: [
          %{
            module: Reactor.Req,
            name: "Reactor.Req",
            target: "Reactor",
            type: "Reactor"
          }
        ]
      ]
    ]
  end
  # README plus everything under documentation/, as ExDoc extras.
  defp extra_documentation do
    ["README.md"]
    |> Enum.concat(Path.wildcard("documentation/**/*.{md,livemd,cheatmd}"))
    |> Enum.map(&{String.to_atom(&1), []})
  end
  defp extra_documentation_groups do
    [
      DSLs: ~r'documentation/dsls'
    ]
  end
  # Compile test support files only in dev/test.
  defp elixirc_paths(env) when env in ~w[dev test]a, do: ~w[lib test/support]
  defp elixirc_paths(_), do: ~w[lib]
end

46
mix.lock Normal file
View file

@ -0,0 +1,46 @@
%{
"bandit": {:hex, :bandit, "1.5.7", "6856b1e1df4f2b0cb3df1377eab7891bec2da6a7fd69dc78594ad3e152363a50", [:mix], [{:hpax, "~> 1.0.0", [hex: :hpax, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:thousand_island, "~> 1.0", [hex: :thousand_island, repo: "hexpm", optional: false]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "f2dd92ae87d2cbea2fa9aa1652db157b6cba6c405cb44d4f6dd87abba41371cd"},
"bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
"castore": {:hex, :castore, "1.0.8", "dedcf20ea746694647f883590b82d9e96014057aff1d44d03ec90f36a5c0dc6e", [:mix], [], "hexpm", "0b2b66d2ee742cb1d9cb8c8be3b43c3a70ee8651f37b75a8b982e036752983f1"},
"credo": {:hex, :credo, "1.7.7", "771445037228f763f9b2afd612b6aa2fd8e28432a95dbbc60d8e03ce71ba4446", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8bc87496c9aaacdc3f90f01b7b0582467b69b4bd2441fe8aae3109d843cc2f2e"},
"decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
"dialyxir": {:hex, :dialyxir, "1.4.3", "edd0124f358f0b9e95bfe53a9fcf806d615d8f838e2202a9f430d59566b6b53b", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "bf2cfb75cd5c5006bec30141b131663299c661a864ec7fbbc72dfa557487a986"},
"doctor": {:hex, :doctor, "0.21.0", "20ef89355c67778e206225fe74913e96141c4d001cb04efdeba1a2a9704f1ab5", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "a227831daa79784eb24cdeedfa403c46a4cb7d0eab0e31232ec654314447e4e0"},
"earmark_parser": {:hex, :earmark_parser, "1.4.41", "ab34711c9dc6212dda44fcd20ecb87ac3f3fce6f0ca2f28d4a00e4154f8cd599", [:mix], [], "hexpm", "a81a04c7e34b6617c2792e291b5a2e57ab316365c2644ddc553bb9ed863ebefa"},
"erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"},
"ex_check": {:hex, :ex_check, "0.16.0", "07615bef493c5b8d12d5119de3914274277299c6483989e52b0f6b8358a26b5f", [:mix], [], "hexpm", "4d809b72a18d405514dda4809257d8e665ae7cf37a7aee3be6b74a34dec310f5"},
"ex_doc": {:hex, :ex_doc, "0.34.2", "13eedf3844ccdce25cfd837b99bea9ad92c4e511233199440488d217c92571e8", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "5ce5f16b41208a50106afed3de6a2ed34f4acfd65715b82a0b84b49d995f95c1"},
"file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"},
"finch": {:hex, :finch, "0.18.0", "944ac7d34d0bd2ac8998f79f7a811b21d87d911e77a786bc5810adb75632ada4", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "69f5045b042e531e53edc2574f15e25e735b522c37e2ddb766e15b979e03aa65"},
"git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"},
"git_ops": {:hex, :git_ops, "2.6.1", "cc7799a68c26cf814d6d1a5121415b4f5bf813de200908f930b27a2f1fe9dad5", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "ce62d07e41fe993ec22c35d5edb11cf333a21ddaead6f5d9868fcb607d42039e"},
"glob_ex": {:hex, :glob_ex, "0.1.8", "f7ef872877ca2ae7a792ab1f9ff73d9c16bf46ecb028603a8a3c5283016adc07", [:mix], [], "hexpm", "9e39d01729419a60a937c9260a43981440c43aa4cadd1fa6672fecd58241c464"},
"hpax": {:hex, :hpax, "1.0.0", "28dcf54509fe2152a3d040e4e3df5b265dcb6cb532029ecbacf4ce52caea3fd2", [:mix], [], "hexpm", "7f1314731d711e2ca5fdc7fd361296593fc2542570b3105595bb0bc6d0fad601"},
"igniter": {:hex, :igniter, "0.3.18", "da7a08eba965a89282c3a8642d7ccc718be65b09aef3d77312dbb27e3f288466", [:mix], [{:glob_ex, "~> 0.1.7", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:inflex, "~> 2.0", [hex: :inflex, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_options, "~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:owl, "~> 0.9", [hex: :owl, repo: "hexpm", optional: false]}, {:rewrite, "~> 0.9", [hex: :rewrite, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.4", [hex: :sourceror, repo: "hexpm", optional: false]}, {:spitfire, ">= 0.1.3 and < 1.0.0-0", [hex: :spitfire, repo: "hexpm", optional: false]}, {:ucwidth, "~> 0.2", [hex: :ucwidth, repo: "hexpm", optional: false]}], "hexpm", "91c7b011cfa6b5036cd84757a3a8a8e009ef59c06956789f598ada3eb8be2fc4"},
"inflex": {:hex, :inflex, "2.1.0", "a365cf0821a9dacb65067abd95008ca1b0bb7dcdd85ae59965deef2aa062924c", [:mix], [], "hexpm", "14c17d05db4ee9b6d319b0bff1bdf22aa389a25398d1952c7a0b5f3d93162dd8"},
"iterex": {:hex, :iterex, "0.1.2", "58f9b9b9a22a55cbfc7b5234a9c9c63eaac26d276b3db80936c0e1c60355a5a6", [:mix], [], "hexpm", "2e103b8bcc81757a9af121f6dc0df312c9a17220f302b1193ef720460d03029d"},
"jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
"libgraph": {:hex, :libgraph, "0.16.0", "3936f3eca6ef826e08880230f806bfea13193e49bf153f93edcf0239d4fd1d07", [:mix], [], "hexpm", "41ca92240e8a4138c30a7e06466acc709b0cbb795c643e9e17174a178982d6bf"},
"makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"},
"makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"},
"makeup_erlang": {:hex, :makeup_erlang, "1.0.1", "c7f58c120b2b5aa5fd80d540a89fdf866ed42f1f3994e4fe189abebeab610839", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "8a89a1eeccc2d798d6ea15496a6e4870b75e014d1af514b1b71fa33134f57814"},
"mime": {:hex, :mime, "2.0.6", "8f18486773d9b15f95f4f4f1e39b710045fa1de891fada4516559967276e4dc2", [:mix], [], "hexpm", "c9945363a6b26d747389aac3643f8e0e09d30499a138ad64fe8fd1d13d9b153e"},
"mint": {:hex, :mint, "1.6.2", "af6d97a4051eee4f05b5500671d47c3a67dac7386045d87a904126fd4bbcea2e", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "5ee441dffc1892f1ae59127f74afe8fd82fda6587794278d924e4d90ea3d63f9"},
"nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
"nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
"nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
"owl": {:hex, :owl, "0.11.0", "2cd46185d330aa2400f1c8c3cddf8d2ff6320baeff23321d1810e58127082cae", [:mix], [{:ucwidth, "~> 0.2", [hex: :ucwidth, repo: "hexpm", optional: true]}], "hexpm", "73f5783f0e963cc04a061be717a0dbb3e49ae0c4bfd55fb4b78ece8d33a65efe"},
"plug": {:hex, :plug, "1.16.1", "40c74619c12f82736d2214557dedec2e9762029b2438d6d175c5074c933edc9d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a13ff6b9006b03d7e33874945b2755253841b238c34071ed85b0e86057f8cddc"},
"plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"},
"reactor": {:hex, :reactor, "0.9.1", "082f8e9b1fd7586c0a016c2fb533835fec7eaef5ffb0263abb4473106c20b1ca", [:mix], [{:igniter, "~> 0.2", [hex: :igniter, repo: "hexpm", optional: false]}, {:iterex, "~> 0.1", [hex: :iterex, repo: "hexpm", optional: false]}, {:libgraph, "~> 0.16", [hex: :libgraph, repo: "hexpm", optional: false]}, {:spark, "~> 2.0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, "~> 0.2", [hex: :splode, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.2", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7191ddf95fdd2b65770a57a2e38dd502a94909e51ac8daf497330e67fc032dc3"},
"req": {:hex, :req, "0.5.6", "8fe1eead4a085510fe3d51ad854ca8f20a622aae46e97b302f499dfb84f726ac", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "cfaa8e720945d46654853de39d368f40362c2641c4b2153c886418914b372185"},
"rewrite": {:hex, :rewrite, "0.10.5", "6afadeae0b9d843b27ac6225e88e165884875e0aed333ef4ad3bf36f9c101bed", [:mix], [{:glob_ex, "~> 0.1", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.0", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "51cc347a4269ad3a1e7a2c4122dbac9198302b082f5615964358b4635ebf3d4f"},
"sourceror": {:hex, :sourceror, "1.5.0", "3e65d5fbb1a8e2864ad6411262c8018fee73474f5789dda12285c82999253d5d", [:mix], [], "hexpm", "4a32b5d189d8453f73278c15712f8731b89e9211e50726b798214b303b51bfc7"},
"spark": {:hex, :spark, "2.2.11", "6589ac0e50d69e5095871a5e8f3bb6107755b1cc71f05a31d7398902506dab9a", [:mix], [{:igniter, ">= 0.2.6 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.2", [hex: :sourceror, repo: "hexpm", optional: false]}], "hexpm", "662d297d0ad49a5990a72cbf342d70e90894218062da2893f2df529f70ecc2b4"},
"spitfire": {:hex, :spitfire, "0.1.3", "7ea0f544005dfbe48e615ed90250c9a271bfe126914012023fd5e4b6b82b7ec7", [:mix], [], "hexpm", "d53b5107bcff526a05c5bb54c95e77b36834550affd5830c9f58760e8c543657"},
"splode": {:hex, :splode, "0.2.4", "71046334c39605095ca4bed5d008372e56454060997da14f9868534c17b84b53", [:mix], [], "hexpm", "ca3b95f0d8d4b482b5357954fec857abd0fa3ea509d623334c1328e7382044c2"},
"telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"},
"thousand_island": {:hex, :thousand_island, "1.3.5", "6022b6338f1635b3d32406ff98d68b843ba73b3aa95cfc27154223244f3a6ca5", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2be6954916fdfe4756af3239fb6b6d75d0b8063b5df03ba76fd8a4c87849e180"},
"ucwidth": {:hex, :ucwidth, "0.2.0", "1f0a440f541d895dff142275b96355f7e91e15bca525d4a0cc788ea51f0e3441", [:mix], [], "hexpm", "c1efd1798b8eeb11fb2bec3cafa3dd9c0c3647bee020543f0340b996177355bf"},
"websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"},
}

1
test/fixtures/file.txt vendored Normal file
View file

@ -0,0 +1 @@
Marty in the DeLorean with the flux capacitor.

View file

@ -0,0 +1,35 @@
defmodule Reactor.Req.DeleteTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias Plug.Conn

  defmodule DeleteReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    # Issue a DELETE to the given URL, raising on 4xx/5xx responses.
    req_delete :request do
      url input(:url)
      http_errors value(:raise)
    end
  end

  test "it can send a DELETE request", %{test: test} do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)

    start_link_supervised!(
      {Support.HttpServer,
       id: test,
       port: port,
       stub: fn conn ->
         # Echo the HTTP method back so the assertions below can verify it.
         Conn.send_resp(conn, 200, conn.method)
       end}
    )

    assert {:ok, response} = Reactor.run(DeleteReactor, %{url: "http://localhost:#{port}/stub"})
    assert response.status == 200
    assert response.body == "DELETE"
  end
end

View file

@ -0,0 +1,36 @@
defmodule Reactor.Req.GetTest do
  @moduledoc false
  use ExUnit.Case, async: true

  defmodule GetReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    # Perform a GET against the given URL, raising on 4xx/5xx responses.
    req_get :request do
      url input(:url)
      http_errors value(:raise)
    end
  end

  setup context do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)
    start_link_supervised!({Support.HttpServer, id: context.test, port: port})
    {:ok, base_url: "http://localhost:#{port}/"}
  end

  test "it successfully performs an HTTP request", %{base_url: base_url} do
    assert {:ok, response} = Reactor.run(GetReactor, %{url: base_url <> "file.txt"})
    assert response.status == 200
    assert response.body =~ "Marty"
  end

  test "it can fail when the request fails", %{base_url: base_url} do
    assert {:error, error} = Reactor.run(GetReactor, %{url: base_url <> "no_file.txt"})
    assert Exception.message(error) =~ "The requested URL returned error: 404"
  end
end

View file

@ -0,0 +1,36 @@
defmodule Reactor.Req.HeadTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias Plug.Conn

  defmodule HeadReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    # Issue a HEAD request to the given URL, raising on 4xx/5xx responses.
    req_head :request do
      url input(:url)
      http_errors value(:raise)
    end
  end

  test "it can send a HEAD request", %{test: test} do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)

    start_link_supervised!(
      {Support.HttpServer,
       id: test,
       port: port,
       stub: fn conn ->
         # Verify the verb on the server side as well as the client side.
         assert conn.method == "HEAD"
         Conn.send_resp(conn, 200, conn.method)
       end}
    )

    assert {:ok, response} = Reactor.run(HeadReactor, %{url: "http://localhost:#{port}/stub"})
    assert response.status == 200
    # HEAD responses carry no body, even though the stub echoed the method.
    assert response.body == ""
  end
end

View file

@ -0,0 +1,34 @@
defmodule Reactor.Req.MergeTest do
  @moduledoc false
  use ExUnit.Case, async: true

  defmodule MergeReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url
    input :auth

    req_new :new do
      url input(:url)
    end

    # Merge extra options (here: auth) into the request built above.
    req_merge :merged do
      request result(:new)
      auth input(:auth)
    end

    return :merged
  end

  test "it merges requests together" do
    url = "https://harton.dev/james/reactor_req"
    credentials = {:basic, "marty:mcfly"}

    assert {:ok, merged} = Reactor.run(MergeReactor, %{url: url, auth: credentials})

    assert merged.url == URI.parse(url)
    assert merged.options.auth == credentials
  end
end

View file

@ -0,0 +1,144 @@
defmodule Reactor.Req.Dsl.NewTest do
  @moduledoc false
  use ExUnit.Case, async: true

  # One representative value for every option the `req_new` DSL supports.
  # Each value is spliced into a generated test body via `unquote/1` below,
  # so values that are not valid quoted literals (the headers map and the
  # %URI{} struct) are wrapped in `Macro.escape/1`.
  @options [
    adapter: Req.Request,
    auth: {:basic, "marty:outatime"},
    aws_sigv4: [region: "hill-valley"],
    base_url: "http://harton.dev/james/reactor_req",
    body: "Roads. Where we're going we don't need roads",
    cache_dir: "priv/cache",
    cache: true,
    compress_body: true,
    connect_options: [speed: "88mph"],
    decode_body: false,
    decode_json: [quickly: true],
    finch_private: [calvin: :klein],
    finch_request: &Finch.request/3,
    finch: :swimming_pool,
    form_multipart: true,
    form: true,
    headers: Macro.escape(%{"speed" => ["88mph"]}),
    inet6: true,
    into: [],
    json: true,
    max_redirects: 10,
    max_retries: 10,
    method: :get,
    params: [name: "marty", year: "1985"],
    path_params_style: :colon,
    path_params: [name: "marty"],
    plug: Plug,
    pool_timeout: 15_000,
    raw: true,
    receive_timeout: 15_000,
    redirect_trusted: true,
    redirect: false,
    retry_delay: 10_000,
    # NOTE(review): `:warn` is deprecated in favour of `:warning` in newer
    # Logger versions — confirm Req still accepts it.
    retry_log_level: :warn,
    retry: false,
    unix_socket: "priv/pretend.sock",
    url: Macro.escape(URI.new!("https://harton.dev/james/reactor_req"))
  ]

  # A reactor that exposes every supported option as an input and threads
  # each one through to a single `req_new` step.
  defmodule AllOptionsReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :adapter
    input :auth
    input :aws_sigv4
    input :base_url
    input :body
    input :cache_dir
    input :cache
    input :compress_body
    input :connect_options
    input :decode_body
    input :decode_json
    input :finch_private
    input :finch_request
    input :finch
    input :form_multipart
    input :form
    input :headers
    input :inet6
    input :into
    input :json
    input :max_redirects
    input :max_retries
    input :method
    input :params
    input :path_params_style
    input :path_params
    input :plug
    input :pool_timeout
    input :raw
    input :receive_timeout
    input :redirect_trusted
    input :redirect
    input :retry_delay
    input :retry_log_level
    input :retry
    input :unix_socket
    input :url

    req_new :new do
      adapter input(:adapter)
      auth input(:auth)
      aws_sigv4 input(:aws_sigv4)
      base_url input(:base_url)
      body input(:body)
      cache_dir input(:cache_dir)
      cache input(:cache)
      compress_body input(:compress_body)
      connect_options input(:connect_options)
      decode_body input(:decode_body)
      decode_json input(:decode_json)
      finch_private input(:finch_private)
      finch_request input(:finch_request)
      finch input(:finch)
      form_multipart input(:form_multipart)
      form input(:form)
      headers input(:headers)
      inet6 input(:inet6)
      into input(:into)
      json input(:json)
      max_redirects input(:max_redirects)
      max_retries input(:max_retries)
      method input(:method)
      params input(:params)
      path_params_style input(:path_params_style)
      path_params input(:path_params)
      plug input(:plug)
      pool_timeout input(:pool_timeout)
      raw input(:raw)
      receive_timeout input(:receive_timeout)
      redirect_trusted input(:redirect_trusted)
      redirect input(:redirect)
      retry_delay input(:retry_delay)
      retry_log_level input(:retry_log_level)
      retry input(:retry)
      unix_socket input(:unix_socket)
      url input(:url)
    end
  end

  # Compile-time loop: generates one test per option. Each test sets every
  # input to nil except the option under scrutiny, then checks the built
  # request carries the expected value.
  for {key, value} <- @options do
    test "it passes the `#{inspect(key)}` option" do
      inputs =
        @options
        |> Map.new(fn {k, _} -> {k, nil} end)
        |> Map.put(unquote(key), unquote(value))

      assert req = Reactor.run!(AllOptionsReactor, inputs)

      # Some options surface as top-level fields on the returned struct and
      # the rest under `req.options`, so look in whichever place exists.
      if Map.has_key?(req, unquote(key)) do
        assert req.unquote(key) == unquote(value)
      else
        assert req.options.unquote(key) == unquote(value)
      end
    end
  end
end

View file

@ -0,0 +1,35 @@
defmodule Reactor.Req.PatchTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias Plug.Conn

  defmodule PatchReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    # Issue a PATCH to the given URL, raising on 4xx/5xx responses.
    req_patch :request do
      url input(:url)
      http_errors value(:raise)
    end
  end

  test "it can send a PATCH request", %{test: test} do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)

    start_link_supervised!(
      {Support.HttpServer,
       id: test,
       port: port,
       stub: fn conn ->
         # Echo the HTTP method back so the assertions below can verify it.
         Conn.send_resp(conn, 200, conn.method)
       end}
    )

    assert {:ok, response} = Reactor.run(PatchReactor, %{url: "http://localhost:#{port}/stub"})
    assert response.status == 200
    assert response.body == "PATCH"
  end
end

View file

@ -0,0 +1,35 @@
defmodule Reactor.Req.PostTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias Plug.Conn

  defmodule PostReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    # Issue a POST to the given URL, raising on 4xx/5xx responses.
    req_post :request do
      url input(:url)
      http_errors value(:raise)
    end
  end

  test "it can send a POST request", %{test: test} do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)

    start_link_supervised!(
      {Support.HttpServer,
       id: test,
       port: port,
       stub: fn conn ->
         # Echo the HTTP method back so the assertions below can verify it.
         Conn.send_resp(conn, 200, conn.method)
       end}
    )

    assert {:ok, response} = Reactor.run(PostReactor, %{url: "http://localhost:#{port}/stub"})
    assert response.status == 200
    assert response.body == "POST"
  end
end

View file

@ -0,0 +1,35 @@
defmodule Reactor.Req.PutTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias Plug.Conn

  defmodule PutReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    # Issue a PUT to the given URL, raising on 4xx/5xx responses.
    req_put :request do
      url input(:url)
      http_errors value(:raise)
    end
  end

  test "it can send a PUT request", %{test: test} do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)

    start_link_supervised!(
      {Support.HttpServer,
       id: test,
       port: port,
       stub: fn conn ->
         # Echo the HTTP method back so the assertions below can verify it.
         Conn.send_resp(conn, 200, conn.method)
       end}
    )

    assert {:ok, response} = Reactor.run(PutReactor, %{url: "http://localhost:#{port}/stub"})
    assert response.status == 200
    assert response.body == "PUT"
  end
end

View file

@ -0,0 +1,33 @@
defmodule Reactor.Req.RequestTest do
  @moduledoc false
  use ExUnit.Case, async: true

  defmodule RequestReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    req_new :new do
      url input(:url)
    end

    # Execute the request built by the step above.
    req_request :merged do
      request result(:new)
    end
  end

  setup context do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)
    start_link_supervised!({Support.HttpServer, id: context.test, port: port})
    {:ok, base_url: "http://localhost:#{port}/"}
  end

  test "it executes requests", %{base_url: base_url} do
    assert {:ok, response} = Reactor.run(RequestReactor, %{url: base_url <> "file.txt"})
    assert response.status == 200
    assert response.body =~ "Marty"
  end
end

View file

@ -0,0 +1,43 @@
defmodule Reactor.Req.RunTest do
  @moduledoc false
  use ExUnit.Case, async: true

  defmodule RunReactor do
    @moduledoc false
    use Reactor, extensions: [Reactor.Req]

    input :url

    req_new :new do
      url input(:url)
      method value("GET")
    end

    # Run the request built above, raising on 4xx/5xx responses.
    req_run :run do
      request result(:new)
      http_errors value(:raise)
    end

    return :run
  end

  setup context do
    # Pick an unprivileged port; the previous lower bound of 1000 included
    # privileged ports 1000..1023, which non-root test runs cannot bind.
    port = Enum.random(1024..0xFFFF)
    start_link_supervised!({Support.HttpServer, id: context.test, port: port})
    {:ok, base_url: "http://localhost:#{port}/"}
  end

  test "it successfully performs an HTTP request", %{base_url: base_url} do
    # The :run step yields both the final request and its response.
    assert {:ok, {_request, response}} = Reactor.run(RunReactor, %{url: base_url <> "file.txt"})
    assert response.status == 200
    assert response.body =~ "Marty"
  end

  test "it can fail when the request fails", %{base_url: base_url} do
    assert {:error, error} = Reactor.run(RunReactor, %{url: base_url <> "no_file.txt"})
    assert Exception.message(error) =~ "The requested URL returned error: 404"
  end
end

View file

@ -0,0 +1,5 @@
defmodule Reactor.ReqTest do
  @moduledoc false
  use ExUnit.Case

  # Runs the doctest examples embedded in Reactor.Req's documentation.
  doctest Reactor.Req
end

View file

@ -0,0 +1,43 @@
defmodule Support.HttpServer do
  # Test-only HTTP server: serves static fixtures from test/fixtures and,
  # for the special "/stub" path, delegates to a caller-supplied stub
  # function so tests can script arbitrary responses.
  @moduledoc false
  use Plug.Builder, init_mode: :runtime, copy_opts_to_assign: :opts

  # Serve everything under test/fixtures at the root of the site. Requests
  # it satisfies never reach :maybe_stub below.
  plug Plug.Static,
    at: "/",
    from: Path.expand("#{__DIR__}/../fixtures")

  plug :maybe_stub

  # "/stub" requests run the configured stub function when one was given;
  # opts arrive via the :opts assign set up by copy_opts_to_assign above.
  def maybe_stub(conn, _opts) when conn.path_info == ["stub"] do
    stub = conn.assigns.opts[:stub]

    if stub do
      stub.(conn)
    else
      send_resp(conn, 500, "No, we don't want no stubs")
    end
  end

  # Anything Plug.Static did not already serve is a 404.
  def maybe_stub(conn, _opts) do
    send_resp(conn, 404, "Not found")
  end

  @doc false
  def start_link(opts \\ []) do
    # Pop :stub so it is routed to this plug's opts rather than to Bandit;
    # remaining opts (e.g. :port) override the Bandit defaults below.
    {stub, opts} = Keyword.pop(opts, :stub)

    [plug: {__MODULE__, stub: stub}, scheme: :http, startup_log: false]
    |> Keyword.merge(opts)
    |> Bandit.start_link()
  end

  @doc false
  def child_spec(opts) do
    # A caller-supplied :id (the tests pass their test name) lets several
    # servers run under the same supervisor without id clashes.
    {id, opts} = Keyword.pop(opts, :id, __MODULE__)

    %{
      id: id,
      start: {__MODULE__, :start_link, [opts]}
    }
  end
end

1
test/test_helper.exs Normal file
View file

@ -0,0 +1 @@
# Boots the ExUnit test framework; `mix test` loads this file before running
# any test modules.
ExUnit.start()