Subida del módulo y tema de PrestaShop

This commit is contained in:
Kaloyan
2026-04-09 18:31:51 +02:00
parent 12c253296f
commit 16b3ff9424
39262 changed files with 7418797 additions and 0 deletions

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,16 @@
[default]
extend-ignore-re = [
# Mock token
";iuani;ansd;ifgjbnai;sdjfgb",
]
[files]
extend-exclude = [
# Typo "achived" is a breaking change to fix
"kotlin/lib/src/main/kotlin/EventType.kt",
"kotlin/lib/src/main/kotlin/EventTypeListOptions.kt",
# False positives in randomly-generated strings
"server/svix-server/migrations/20230505222507_operational_webhook_event-types.up.sql",
"server/svix-server/migrations/20230505222507_operational_webhook_event-types.down.sql",
]

View File

@@ -0,0 +1 @@
1.24.0

View File

@@ -0,0 +1,21 @@
The MIT License
Copyright (c) 2021 Svix (https://www.svix.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,2 @@
imports_granularity = "Crate"
group_imports = "StdExternalCrate"

7540
modules/psshipping/vendor/svix/svix/bridge/Cargo.lock generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,15 @@
[workspace]
# Earlier versions of deno fail to compile in a workspace because of wgpu-hal
# The "fix" is to enable resolver 2 at the workspace. Crates with edition 2021
# use this by default, but workspaces are set independently for some reason.
# <https://stackoverflow.com/questions/73967574/dependency-fails-to-compile-in-workspace-but-not-in-package>
resolver = "2"
members = [
"svix-bridge-types",
"svix-bridge",
"svix-bridge-plugin-queue",
]
[profile.dev.package]
quote = { opt-level = 2 }

View File

@@ -0,0 +1,68 @@
# Base build
FROM rust:1.72-slim-bullseye AS build
# Build dependencies; versions loosely pinned so patch releases still resolve.
RUN apt-get update && apt-get install -y \
build-essential=12.* \
checkinstall=1.* \
curl=7.* \
libssl-dev=* \
pkg-config=0.29.* \
zlib1g-dev=1:* \
--no-install-recommends
# Create an unprivileged user plus the app/home dirs it owns.
RUN set -ex ; \
mkdir -p /app ;\
useradd appuser ;\
chown -R appuser: /app ;\
mkdir -p /home/appuser ;\
chown -R appuser: /home/appuser
WORKDIR /app
# Hack to enable docker caching
# Copy only the manifests first so the dependency-only build below stays
# cached until a Cargo.toml/Cargo.lock actually changes.
COPY Cargo.toml .
COPY Cargo.lock .
COPY svix-bridge-types/Cargo.toml svix-bridge-types/
COPY svix-bridge-plugin-queue/Cargo.toml svix-bridge-plugin-queue/
COPY svix-bridge/Cargo.toml svix-bridge/
# Build with dummy sources to compile (and cache) all dependencies, then
# delete the dummy sources again.
RUN set -ex ;\
mkdir svix-bridge-plugin-queue/src ;\
mkdir svix-bridge-types/src ;\
mkdir svix-bridge/src ;\
echo '' > svix-bridge-plugin-queue/src/lib.rs ;\
echo '' > svix-bridge-types/src/lib.rs ;\
echo 'fn main() { println!("Dummy!"); }' > svix-bridge/src/main.rs ;\
cargo build --release ;\
rm -rf \
svix-bridge-plugin-queue/src \
svix-bridge-types/src \
svix-bridge/src
COPY . .
# touching the lib.rs/main.rs ensures cargo rebuilds them instead of considering them already built.
RUN touch */src/lib.rs && touch */src/main.rs
# --frozen: fail rather than touch the network/lockfile at this stage.
RUN cargo build --release --frozen
# Production
FROM debian:bullseye-slim AS prod
# Same unprivileged user layout as the build stage.
RUN set -ex ; \
mkdir -p /app ;\
useradd appuser ;\
chown -R appuser: /app ;\
mkdir -p /home/appuser ;\
chown -R appuser: /home/appuser
# CA certs only (needed for outbound TLS); clean apt lists to keep the image small.
RUN apt-get update ;\
apt-get install --no-install-recommends -y ca-certificates=20210119; \
update-ca-certificates; \
rm -rf /var/lib/apt/lists/*
USER appuser
COPY --from=build /app/target/release/svix-bridge /usr/local/bin/svix-bridge
EXPOSE 5000
# Will fail if there's no `svix-bridge.yaml` in the CWD or `SVIX_BRIDGE_CFG` is not set to a valid
# path to a config.
CMD ["svix-bridge"]

View File

@@ -0,0 +1,20 @@
Copyright (c) 2021-2023 Svix Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,20 @@
#!/bin/bash
# Runs this crate's test suite with environment variables pointing at local
# queue emulators. TEST_COMMAND may be pre-set to override the runner.
if [[ -z "$TEST_COMMAND" ]]; then
if [[ -z "$CARGO_HOME" ]]; then
CARGO_HOME="$HOME/.cargo"
fi
# Prefer cargo-nextest when it's on PATH or installed in the cargo bin dir.
if command -v cargo-nextest || [[ -e "$CARGO_HOME/bin/cargo-nextest" ]]; then
TEST_COMMAND="cargo nextest run"
else
TEST_COMMAND="cargo test"
fi
fi
# Dummy AWS credentials with region "elasticmq" (presumably a local SQS
# emulator) plus GCP Pub/Sub emulator settings; extra args go to the runner.
AWS_DEFAULT_REGION="elasticmq" \
AWS_ACCESS_KEY_ID="x" \
AWS_SECRET_ACCESS_KEY="x" \
PUBSUB_EMULATOR_HOST=localhost:8085 \
PUBSUB_PROJECT_ID=local-project \
${TEST_COMMAND} --all-features -- "$@"

View File

@@ -0,0 +1,27 @@
[package]
name = "svix-bridge-plugin-queue"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
omniqueue = "0.2.0"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
svix-bridge-types = { path = "../svix-bridge-types" }
tokio = { version = "1", features = ["full"] }
tokio-executor-trait = "2.1"
tokio-reactor-trait = "1.1"
tracing = "0.1"
[dev-dependencies]
aws-config = "1.1.5"
aws-sdk-sqs = "1.13.0"
fastrand = "2.0.1"
google-cloud-googleapis = "0.12.0"
google-cloud-pubsub = "0.23.0"
lapin = "2"
redis = { version = "0.24.0", features = ["tokio-comp", "streams"] }
tracing-subscriber = "0.3"
wiremock = "0.5.18"

View File

@@ -0,0 +1,20 @@
Copyright (c) 2021-2023 Svix Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,164 @@
use serde::Deserialize;
use svix_bridge_types::{
ReceiverOutput, SenderInput, SenderOutputOpts, TransformationConfig, TransformerInputFormat,
};
use crate::sender_input::QueueSender;
pub use crate::{
gcp_pubsub::{GCPPubSubInputOpts, GCPPubSubOutputOpts},
rabbitmq::{RabbitMqInputOpts, RabbitMqOutputOpts},
receiver_output::QueueForwarder,
redis::{RedisInputOpts, RedisOutputOpts},
sqs::{SqsInputOpts, SqsOutputOpts},
};
/// Deserialized config for one "sender" pipeline: read from a queue
/// (`input`), optionally transform each payload, then forward to `output`.
#[derive(Deserialize)]
pub struct QueueConsumerConfig {
    pub name: String,
    pub input: SenderInputOpts,
    #[serde(default)]
    pub transformation: Option<TransformationConfig>,
    pub output: SenderOutputOpts,
}

impl QueueConsumerConfig {
    /// Builds the boxed [`SenderInput`] for this config.
    ///
    /// Errors (with a static message) when a redis input is combined with a
    /// non-JSON transformation, since redis requires JSON internally.
    pub fn into_sender_input(self) -> Result<Box<dyn SenderInput>, &'static str> {
        // FIXME: see if this check is still needed. String transforms worked for the omniqueue redis receiver, I think?
        if matches!(self.input, SenderInputOpts::Redis(_))
            && self
                .transformation
                .as_ref()
                .map(|t| t.format() != TransformerInputFormat::Json)
                // No transformation configured => no format restriction.
                .unwrap_or_default()
        {
            return Err("redis only supports json formatted transformations");
        }
        Ok(Box::new(QueueSender::new(
            self.name,
            self.input,
            self.transformation,
            self.output,
        )))
    }
}
/// Connects the configured backend and wraps it as a boxed [`ReceiverOutput`].
///
/// Mirrors the redis/non-JSON-transformation check in
/// `QueueConsumerConfig::into_sender_input`, but reports via `Error::Generic`.
pub async fn into_receiver_output(
    name: String,
    opts: ReceiverOutputOpts,
    // Annoying to have to pass this, but certain backends (redis) only work with certain transformations (json).
    transformation: Option<&TransformationConfig>,
) -> Result<Box<dyn ReceiverOutput>, crate::Error> {
    // FIXME: see if this check is still needed. String transforms worked for the omniqueue redis receiver, I think?
    if matches!(opts, ReceiverOutputOpts::Redis(_))
        && transformation
            .as_ref()
            .map(|t| t.format() != TransformerInputFormat::Json)
            // No transformation configured => no format restriction.
            .unwrap_or_default()
    {
        return Err(crate::Error::Generic(
            "redis only supports json formatted transformations".to_string(),
        ));
    }

    let forwarder = QueueForwarder::from_receiver_output_opts(name, opts).await?;
    Ok(Box::new(forwarder))
}
// TODO: feature flag the variants, thread the features down through to generic-queue
/// Backend selection + options for a sender's input queue. The serde `type`
/// tag selects the variant: `gcp-pubsub`, `rabbitmq`, `redis`, or `sqs`.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum SenderInputOpts {
    // `rename_all = "lowercase"` alone would produce "gcppubsub".
    #[serde(rename = "gcp-pubsub")]
    GCPPubSub(GCPPubSubInputOpts),
    RabbitMQ(RabbitMqInputOpts),
    Redis(RedisInputOpts),
    SQS(SqsInputOpts),
}

/// Backend selection + options for a receiver's output queue; same tagging
/// scheme as [`SenderInputOpts`].
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ReceiverOutputOpts {
    #[serde(rename = "gcp-pubsub")]
    GCPPubSub(GCPPubSubOutputOpts),
    RabbitMQ(RabbitMqOutputOpts),
    Redis(RedisOutputOpts),
    SQS(SqsOutputOpts),
}
#[cfg(test)]
mod tests {
    use svix_bridge_types::{
        SenderOutputOpts, SvixSenderOutputOpts, TransformationConfig, TransformerInputFormat,
    };

    use super::{into_receiver_output, QueueConsumerConfig};
    use crate::{
        config::{ReceiverOutputOpts, SenderInputOpts},
        redis::{RedisInputOpts, RedisOutputOpts},
    };

    // FIXME: can't support raw payload access for redis because it requires JSON internally.
    // Revisit after `omniqueue` adoption.
    #[test]
    fn redis_sender_with_string_transformation_is_err() {
        // Mostly-empty redis input: only the transformation format matters here.
        let cfg = QueueConsumerConfig {
            name: "redis-with-string-transformation".to_string(),
            input: SenderInputOpts::Redis(RedisInputOpts {
                dsn: "".to_string(),
                max_connections: 0,
                reinsert_on_nack: false,
                queue_key: "".to_string(),
                delayed_queue_key: None,
                consumer_group: "".to_string(),
                consumer_name: "".to_string(),
                ack_deadline_ms: 2_000,
            }),
            transformation: Some(TransformationConfig::Explicit {
                format: TransformerInputFormat::String,
                src: String::new(),
            }),
            output: SenderOutputOpts::Svix(SvixSenderOutputOpts {
                token: "".to_string(),
                options: None,
            }),
        };
        assert_eq!(
            cfg.into_sender_input()
                .err()
                .expect("invalid config didn't result in error"),
            "redis only supports json formatted transformations"
        )
    }

    // FIXME: can't support raw payload access for redis because it requires JSON internally.
    // Revisit after `omniqueue` adoption.
    #[tokio::test]
    async fn test_redis_receiver_string_transform_is_err() {
        let redis_out = ReceiverOutputOpts::Redis(RedisOutputOpts {
            dsn: "".to_string(),
            max_connections: 0,
            queue_key: "".to_string(),
            delayed_queue_key: None,
            ack_deadline_ms: 2_000,
        });

        // Explicit String fails
        let res = into_receiver_output(
            "".to_string(),
            redis_out,
            Some(TransformationConfig::Explicit {
                src: String::new(),
                format: TransformerInputFormat::String,
            })
            .as_ref(),
        )
        .await;
        assert!(matches!(
            res.err()
                .expect("invalid config didn't result in error"),
            crate::error::Error::Generic(msg) if msg == "redis only supports json formatted transformations"
        ));
    }
}

View File

@@ -0,0 +1,47 @@
pub use omniqueue::QueueError;
use svix_bridge_types::svix;
/// Unified error type for the queue plugin.
pub enum Error {
    /// Error message concerning the delivery payload.
    Payload(String),
    /// JSON (de)serialization failure.
    Json(serde_json::Error),
    /// Error surfaced by the underlying `omniqueue` backend.
    Queue(QueueError),
    /// Error from the svix API client.
    Svix(svix::error::Error),
    /// Catch-all carrying only a message.
    Generic(String),
}

/// Convenience alias pinning the error type to this module's [`Error`].
pub type Result<T> = std::result::Result<T, Error>;
// Conversions so `?` can lift the common error sources into `Error`.
impl From<svix::error::Error> for Error {
    fn from(value: svix::error::Error) -> Self {
        Error::Svix(value)
    }
}

impl From<serde_json::Error> for Error {
    fn from(value: serde_json::Error) -> Self {
        Error::Json(value)
    }
}

impl From<QueueError> for Error {
    fn from(value: QueueError) -> Self {
        Error::Queue(value)
    }
}

impl From<String> for Error {
    fn from(value: String) -> Self {
        Self::Generic(value)
    }
}
impl From<Error> for std::io::Error {
fn from(value: Error) -> Self {
match value {
Error::Payload(e) => std::io::Error::new(std::io::ErrorKind::Other, e),
Error::Json(e) => std::io::Error::new(std::io::ErrorKind::Other, e),
Error::Queue(e) => std::io::Error::new(std::io::ErrorKind::Other, e),
Error::Svix(e) => std::io::Error::new(std::io::ErrorKind::Other, e),
Error::Generic(e) => std::io::Error::new(std::io::ErrorKind::Other, e),
}
}
}

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,44 @@
use std::path::PathBuf;
use omniqueue::{backends, DynConsumer, DynProducer};
use serde::Deserialize;
use crate::error::{Error, Result};
/// Config for consuming from a GCP Pub/Sub subscription.
#[derive(Debug, Default, Deserialize)]
pub struct GCPPubSubInputOpts {
    pub subscription_id: String,
    // NOTE(review): behavior when `None` (e.g. fallback to ambient
    // credentials) is decided by the omniqueue backend — confirm there.
    pub credentials_file: Option<PathBuf>,
}

/// Config for publishing to a GCP Pub/Sub topic.
#[derive(Clone, Debug, Deserialize)]
pub struct GCPPubSubOutputOpts {
    pub topic: String,
    pub credentials_file: Option<PathBuf>,
}

/// Builds a dynamic consumer for the configured subscription.
pub async fn consumer(cfg: &GCPPubSubInputOpts) -> Result<DynConsumer> {
    backends::GcpPubSubBackend::builder(backends::GcpPubSubConfig {
        subscription_id: cfg.subscription_id.clone(),
        credentials_file: cfg.credentials_file.clone(),
        // Don't need this. Topics are for producers only.
        topic_id: String::new(),
    })
    .make_dynamic()
    .build_consumer()
    .await
    .map_err(Error::from)
}

/// Builds a dynamic producer for the configured topic.
pub async fn producer(cfg: &GCPPubSubOutputOpts) -> Result<DynProducer> {
    backends::GcpPubSubBackend::builder(backends::GcpPubSubConfig {
        topic_id: cfg.topic.clone(),
        credentials_file: cfg.credentials_file.clone(),
        // Don't need this. Subscriptions are for consumers only.
        subscription_id: String::new(),
    })
    .make_dynamic()
    .build_producer()
    .await
    .map_err(Error::from)
}

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,244 @@
use std::time::{Duration, Instant};
use omniqueue::{Delivery, DynConsumer, QueueError};
use svix_bridge_types::{
async_trait, svix::api::Svix, CreateMessageRequest, JsObject, TransformationConfig,
TransformerInput, TransformerInputFormat, TransformerJob, TransformerOutput, TransformerTx,
};
pub const PLUGIN_NAME: &str = env!("CARGO_PKG_NAME");
pub const PLUGIN_VERS: &str = env!("CARGO_PKG_VERSION");
pub mod config;
mod error;
mod gcp_pubsub;
mod rabbitmq;
mod receiver_output;
mod redis;
pub mod sender_input;
mod sqs;
use error::Error;
/// Newtype for [`omniqueue::queue::Delivery`].
///
/// Mostly vestigial at this point, though it doesn't hurt to have something to act as a facade to
/// group helper functions for handling payload details.
pub struct DeliveryWrapper(Delivery);

impl From<Delivery> for DeliveryWrapper {
    fn from(value: Delivery) -> Self {
        Self(value)
    }
}

impl DeliveryWrapper {
    /// Delegates to the inner delivery types ack method.
    async fn ack(self) -> Result<(), QueueError> {
        // Map the `(error, _)` tuple down to just the error; the second
        // element is discarded.
        self.0.ack().await.map_err(|(e, _)| e)
    }

    /// Delegates to the inner delivery types nack method.
    async fn nack(self) -> Result<(), QueueError> {
        self.0.nack().await.map_err(|(e, _)| e)
    }

    /// Decodes the inner delivery as String.
    ///
    /// `NoData` when there is no payload; a generic error when the bytes
    /// aren't valid UTF-8.
    fn raw_payload(&self) -> Result<&str, QueueError> {
        // TODO: used to be unsupported for redis. Is it now? Check for skipped tests to prove it.
        let bytes = self.0.borrow_payload().ok_or(QueueError::NoData)?;
        std::str::from_utf8(bytes).map_err(QueueError::generic)
    }

    /// Decodes the inner delivery as `serde_json::Value`.
    fn payload(&self) -> Result<serde_json::Value, QueueError> {
        self.0.payload_serde_json()?.ok_or(QueueError::NoData)
    }
}
/// Shared skeleton for queue-backed sender inputs: pull deliveries, run the
/// optional transformation, then create messages in svix, acking/nacking
/// each delivery according to the outcome.
#[async_trait]
trait Consumer {
    /// The source of the stream of messages, e.g. the name or id for the queue, subscription, etc.
    fn source(&self) -> &str;

    /// The name of the messaging system, e.g. rabbitmq, sqs, etc.
    fn system(&self) -> &str;

    /// Gets the channel sender for running transformations.
    fn transformer_tx(&self) -> Option<&TransformerTx>;

    /// The js source for the transformation to run on each payload.
    fn transformation(&self) -> Option<&TransformationConfig>;

    /// The client to use when creating messages in svix.
    fn svix_client(&self) -> &Svix;

    /// Sends `script` + `input` to the transformer worker and awaits the
    /// resulting JS object.
    ///
    /// Panics (via `expect`) if no transformer channel has been configured.
    async fn transform(
        &self,
        script: String,
        input: TransformerInput,
    ) -> std::io::Result<JsObject> {
        let (job, rx) = TransformerJob::new(script, input);
        self.transformer_tx()
            .as_ref()
            .expect("transformations not configured")
            .send(job)
            .map_err(|e| Error::Generic(e.to_string()))?;

        // Two failure layers: the reply channel itself, then the
        // transformation's own Result inside it.
        let ret = rx
            .await
            .map_err(|_e| Error::Generic("transformation rx failed".to_string()))
            .and_then(|x| {
                x.map_err(|_e| Error::Generic("transformation execution failed".to_string()))
            })?;

        match ret {
            TransformerOutput::Object(v) => Ok(v),
            TransformerOutput::Invalid => {
                Err(Error::Generic("transformation produced unexpected value".to_string()).into())
            }
        }
    }

    /// Gets consumer (likely based on a config value), called by [`consume`].
    async fn consumer(&self) -> std::io::Result<DynConsumer>;

    /// Main consumer loop
    ///
    /// Returns only on error; `run_inner` handles restarts.
    async fn consume(&self) -> std::io::Result<()> {
        let mut consumer = self.consumer().await?;
        tracing::debug!("{} consuming: {}", self.system(), self.source(),);
        loop {
            self.receive(&mut consumer).await?;
        }
    }

    /// Pulls N messages off the queue and feeds them to [`Self::process`].
    #[tracing::instrument(skip_all,
        fields(
            otel.kind = "CONSUMER",
            messaging.system = self.system(),
            messaging.operation = "receive",
            messaging.source = self.source(),
            svix_bridge_plugin.name = crate::PLUGIN_NAME,
            svix_bridge_plugin.vers = crate::PLUGIN_VERS,
        )
    )]
    async fn receive(&self, consumer: &mut DynConsumer) -> std::io::Result<()> {
        // FIXME: omniqueue has a fixed batch size of 1 afaict. Would be nicer to pull N at a time.
        let delivery = consumer.receive().await.map_err(Error::from)?;
        self.process(delivery.into()).await?;
        Ok(())
    }

    /// Parses the delivery as JSON and feeds it into [`create_svix_message`].
    /// Will nack the delivery if either the JSON parse, transformation, or the request to svix fails.
    #[tracing::instrument(skip_all, fields(messaging.operation = "process"))]
    async fn process(&self, delivery: DeliveryWrapper) -> std::io::Result<()> {
        let payload = if let Some(xform_cfg) = self.transformation() {
            let input = match xform_cfg.format() {
                TransformerInputFormat::Json => {
                    let json_payload = match delivery.payload() {
                        Ok(p) => p,
                        Err(e) => {
                            // Undecodable payload: warn, nack, and keep the
                            // consumer loop alive rather than erroring out.
                            tracing::warn!("{e}");
                            delivery.nack().await.map_err(Error::from)?;
                            return Ok(());
                        }
                    };
                    TransformerInput::JSON(json_payload)
                }
                TransformerInputFormat::String => {
                    // N.b. our redis backend doesn't support string payloads, but higher up in the
                    // call stack, during the plugin construction, we should be catching this and
                    // giving an error about bad config.
                    // If we get here somehow with a redis delivery, this call will panic.
                    let raw_payload = match delivery.raw_payload() {
                        Ok(p) => p,
                        Err(e) => {
                            tracing::warn!("{e}");
                            delivery.nack().await.map_err(Error::from)?;
                            return Ok(());
                        }
                    };
                    // FIXME: if we add a lifetime to `TransformerInput` we might avoid this allocation.
                    TransformerInput::String(raw_payload.to_string())
                }
            };

            let script = xform_cfg.source().clone();
            match self.transform(script, input).await {
                Err(e) => {
                    tracing::error!("nack: {e}");
                    delivery.nack().await.map_err(Error::from)?;
                    return Ok(());
                }
                Ok(x) => serde_json::from_value(serde_json::Value::Object(x))?,
            }
        } else {
            // Parse as JSON when not using a transformation because Create Message requires JSON.
            // If this fails, the config needs to change.
            let json_payload = match delivery.payload() {
                Ok(p) => p,
                Err(e) => {
                    tracing::warn!("{e}");
                    delivery.nack().await.map_err(Error::from)?;
                    return Ok(());
                }
            };
            serde_json::from_value(json_payload)?
        };

        // Ack only after svix accepts the message; nack so it can be retried.
        match create_svix_message(self.svix_client(), payload).await {
            Ok(_) => {
                tracing::trace!("ack");
                delivery.ack().await.map_err(Error::from)?
            }
            Err(e) => {
                tracing::error!("nack: {e}");
                delivery.nack().await.map_err(Error::from)?
            }
        }
        Ok(())
    }
}
/// Supervises a consumer forever: restarts `consume` whenever it errors,
/// sleeping with a linear backoff (300ms per recent failure, capped at 3s).
async fn run_inner(consumer: &(impl Consumer + Send + Sync)) -> std::io::Result<()> {
    let mut fails: u64 = 0;
    let mut last_fail = Instant::now();

    let system_name = consumer.system();
    let source = consumer.source();
    tracing::info!("{system_name} starting: {source}");

    loop {
        // `consume` only returns on failure; log and fall through to backoff.
        if let Err(e) = consumer.consume().await {
            tracing::error!("{e}");
        }
        tracing::error!("{system_name} disconnected: {source}");

        if last_fail.elapsed() > Duration::from_secs(10) {
            // reset the fail count if we didn't have a hiccup in the past short while.
            tracing::trace!("been a while since last fail, resetting count");
            fails = 0;
        } else {
            fails += 1;
        }
        last_fail = Instant::now();
        tokio::time::sleep(Duration::from_millis((300 * fails).min(3000))).await;
    }
}
/// Creates one message in svix.
///
/// The request is destructured directly in the signature so the tracing
/// span fields can reference `app_id`/`message.event_type`.
#[tracing::instrument(skip_all, level = "error", fields(
    app_id,
    event_type = message.event_type
))]
async fn create_svix_message(
    svix: &Svix,
    CreateMessageRequest {
        app_id,
        message,
        post_options,
    }: CreateMessageRequest,
) -> std::io::Result<()> {
    svix.message()
        .create(app_id, message, post_options.map(Into::into))
        .await
        .map_err(Error::from)?;
    Ok(())
}

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,85 @@
use omniqueue::{backends, DynConsumer, DynProducer};
use serde::Deserialize;
use crate::error::{Error, Result};
/// Config for consuming from a RabbitMQ queue.
#[derive(Debug, Deserialize)]
pub struct RabbitMqInputOpts {
    /// Connection string for RabbitMQ.
    pub uri: String,
    /// The name of the queue to consume from.
    /// N.b. the queue must be declared before the consumer can connect to it.
    pub queue_name: String,
    /// Identifier for the consumer.
    #[serde(default)]
    pub consumer_tag: Option<String>,
    #[serde(default)]
    pub consume_opts: Option<backends::rabbitmq::BasicConsumeOptions>,
    #[serde(default)]
    pub consume_args: Option<backends::rabbitmq::FieldTable>,
    /// Requeue messages on nack (defaults to `true`).
    #[serde(default = "default_requeue")]
    pub requeue_on_nack: bool,
}

// Serde default for `requeue_on_nack`.
fn default_requeue() -> bool {
    true
}

/// Config for publishing to a RabbitMQ exchange.
#[derive(Clone, Debug, Deserialize)]
pub struct RabbitMqOutputOpts {
    /// Connection string for RabbitMQ.
    pub uri: String,
    /// The exchange to publish messages to.
    pub exchange: String,
    /// The routing key to publish messages to.
    pub routing_key: String,
    #[serde(default)]
    pub publish_options: backends::rabbitmq::BasicPublishOptions,
    #[serde(default)]
    pub publish_properties: backends::rabbitmq::BasicProperties,
}
/// Builds a dynamic consumer for the configured RabbitMQ queue.
pub async fn consumer(cfg: &RabbitMqInputOpts) -> Result<DynConsumer> {
    backends::rabbitmq::RabbitMqBackend::builder(backends::rabbitmq::RabbitMqConfig {
        uri: cfg.uri.clone(),
        connection_properties: backends::rabbitmq::ConnectionProperties::default(),
        // Publisher-side settings are unused by a consumer; left empty.
        publish_exchange: String::new(),
        publish_routing_key: String::new(),
        publish_options: backends::rabbitmq::BasicPublishOptions::default(),
        publish_properties: backends::rabbitmq::BasicProperties::default(),
        consume_queue: cfg.queue_name.clone(),
        consumer_tag: cfg.consumer_tag.clone().unwrap_or_default(),
        consume_options: cfg.consume_opts.unwrap_or_default(),
        consume_arguments: cfg.consume_args.clone().unwrap_or_default(),
        consume_prefetch_count: None,
        requeue_on_nack: cfg.requeue_on_nack,
    })
    .make_dynamic()
    .build_consumer()
    .await
    .map_err(Error::from)
}

/// Builds a dynamic producer publishing to the configured exchange/routing key.
pub async fn producer(cfg: &RabbitMqOutputOpts) -> Result<DynProducer> {
    backends::rabbitmq::RabbitMqBackend::builder(backends::rabbitmq::RabbitMqConfig {
        uri: cfg.uri.clone(),
        // N.b the connection properties type is not serde-friendly. If we want to expose some
        // of these settings we'll probably need to provide our own type and build the real one
        // here from cfg.
        connection_properties: Default::default(),
        publish_exchange: cfg.exchange.clone(),
        publish_routing_key: cfg.routing_key.clone(),
        publish_options: cfg.publish_options,
        publish_properties: cfg.publish_properties.clone(),
        // consumer stuff we don't care about
        consume_queue: "".to_string(),
        consumer_tag: "".to_string(),
        consume_options: Default::default(),
        consume_arguments: Default::default(),
        consume_prefetch_count: None,
        requeue_on_nack: false,
    })
    .make_dynamic()
    .build_producer()
    .await
    .map_err(Error::from)
}

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,52 @@
use std::sync::Arc;
use omniqueue::DynProducer;
use svix_bridge_types::{async_trait, ForwardRequest, ReceiverOutput};
use crate::{config::ReceiverOutputOpts, error::Result};
/// A [`ReceiverOutput`] that forwards each incoming request payload to a
/// queue producer.
#[derive(Clone)]
pub struct QueueForwarder {
    name: String,
    // FIXME: if we retain things like the queue name we can show this in the Debug impl
    // FIXME: raw payloads not yet supported for receivers, but probably should be.
    sender: Arc<DynProducer>,
}

impl QueueForwarder {
    /// Connects a producer for the backend selected by `opts`.
    pub async fn from_receiver_output_opts(
        name: String,
        opts: ReceiverOutputOpts,
    ) -> Result<QueueForwarder> {
        let sender = match opts {
            ReceiverOutputOpts::GCPPubSub(cfg) => crate::gcp_pubsub::producer(&cfg).await?,
            ReceiverOutputOpts::RabbitMQ(cfg) => crate::rabbitmq::producer(&cfg).await?,
            ReceiverOutputOpts::Redis(cfg) => crate::redis::producer(&cfg).await?,
            ReceiverOutputOpts::SQS(cfg) => crate::sqs::producer(&cfg).await?,
        };
        Ok(QueueForwarder {
            name,
            sender: Arc::new(sender),
        })
    }
}

impl std::fmt::Debug for QueueForwarder {
    // Manual impl: emits just the type name (see the FIXME on the struct
    // about including more detail).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("QueueForwarder").finish()
    }
}

#[async_trait]
impl ReceiverOutput for QueueForwarder {
    fn name(&self) -> &str {
        &self.name
    }

    /// Serializes the forwarded payload as JSON and publishes it.
    async fn handle(&self, request: ForwardRequest) -> std::io::Result<()> {
        Ok(self
            .sender
            .send_serde_json(&request.payload)
            .await
            .map_err(crate::Error::from)?)
    }
}

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,92 @@
use omniqueue::{backends, DynConsumer, DynProducer};
use serde::Deserialize;
use crate::error::{Error, Result};
/// Config for consuming from redis.
#[derive(Debug, Default, Deserialize)]
pub struct RedisInputOpts {
    pub dsn: String,
    pub max_connections: u16,
    #[serde(default = "default_reinsert_on_nack")]
    pub reinsert_on_nack: bool,
    pub queue_key: String,
    /// Defaults to `"{queue_key}_delays"` when unset (see `consumer`).
    pub delayed_queue_key: Option<String>,
    pub consumer_group: String,
    pub consumer_name: String,
    #[serde(default = "default_ack_deadline_ms")]
    pub ack_deadline_ms: i64,
}

// Serde default: reinsert nacked messages unless explicitly disabled.
fn default_reinsert_on_nack() -> bool {
    true
}

/// Config for producing to redis.
#[derive(Clone, Debug, Deserialize)]
pub struct RedisOutputOpts {
    pub dsn: String,
    pub max_connections: u16,
    pub queue_key: String,
    /// Defaults to `"{queue_key}_delays"` when unset (see `producer`).
    pub delayed_queue_key: Option<String>,
    #[serde(default = "default_ack_deadline_ms")]
    pub ack_deadline_ms: i64,
}

// Serde default ack deadline: 5 seconds.
fn default_ack_deadline_ms() -> i64 {
    5_000
}
/// Builds a dynamic consumer for the configured redis queue.
pub async fn consumer(cfg: &RedisInputOpts) -> Result<DynConsumer> {
    // Derive the delayed-queue key (and its lock key) from the main queue
    // key when not explicitly configured.
    let delayed_queue_key = cfg
        .delayed_queue_key
        .clone()
        .unwrap_or_else(|| format!("{}_delays", cfg.queue_key));
    let delayed_lock_key = format!("{delayed_queue_key}_lock");
    backends::RedisBackend::<backends::redis::RedisMultiplexedConnectionManager>::builder(
        backends::RedisConfig {
            dsn: cfg.dsn.clone(),
            max_connections: cfg.max_connections,
            reinsert_on_nack: cfg.reinsert_on_nack,
            queue_key: cfg.queue_key.clone(),
            delayed_queue_key,
            delayed_lock_key,
            consumer_group: cfg.consumer_group.clone(),
            consumer_name: cfg.consumer_name.clone(),
            // FIXME: expose in config?
            payload_key: "payload".to_string(),
            ack_deadline_ms: cfg.ack_deadline_ms,
        },
    )
    .make_dynamic()
    .build_consumer()
    .await
    .map_err(Error::from)
}

/// Builds a dynamic producer for the configured redis queue.
pub async fn producer(cfg: &RedisOutputOpts) -> Result<DynProducer> {
    // Same key derivation as `consumer`.
    let delayed_queue_key = cfg
        .delayed_queue_key
        .clone()
        .unwrap_or_else(|| format!("{}_delays", cfg.queue_key));
    let delayed_lock_key = format!("{delayed_queue_key}_lock");
    backends::RedisBackend::<backends::redis::RedisMultiplexedConnectionManager>::builder(
        backends::RedisConfig {
            dsn: cfg.dsn.clone(),
            max_connections: cfg.max_connections,
            queue_key: cfg.queue_key.clone(),
            delayed_queue_key,
            delayed_lock_key,
            // FIXME: expose in config?
            payload_key: "payload".to_string(),
            // consumer stuff we don't care about.
            reinsert_on_nack: false,
            consumer_group: String::new(),
            consumer_name: String::new(),
            ack_deadline_ms: cfg.ack_deadline_ms,
        },
    )
    .make_dynamic()
    .build_producer()
    .await
    .map_err(Error::from)
}

View File

@@ -0,0 +1,11 @@
<?php
// Anti directory-listing stub (presumably the standard PrestaShop "index.php
// in every folder" convention): force no caching, then bounce the visitor
// up one directory.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT'); // date in the past => always expired
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT'); // always "just modified"
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false); // `false` appends a second header instead of replacing
header('Pragma: no-cache'); // HTTP/1.0 clients
header('Location: ../'); // redirect to the parent directory
exit;

View File

@@ -0,0 +1,112 @@
use omniqueue::DynConsumer;
use svix_bridge_types::{
async_trait, svix::api::Svix, SenderInput, SenderOutputOpts, TransformationConfig,
TransformerTx,
};
use crate::{
config::SenderInputOpts, error::Error, gcp_pubsub, rabbitmq, run_inner, sqs, Consumer,
};
/// A queue-backed sender input: consumes from the configured queue and
/// creates messages via the svix client.
pub struct QueueSender {
    name: String,
    // Display names derived from `input_opts` at construction, used in
    // logs/tracing spans.
    source: String,
    system: String,
    input_opts: SenderInputOpts,
    transformation: Option<TransformationConfig>,
    // `None` until injected via `SenderInput::set_transformer`.
    transformer_tx: Option<TransformerTx>,
    svix_client: Svix,
}
impl std::fmt::Debug for QueueSender {
    /// Manual impl emitting just the type name (mirrors `QueueForwarder`'s
    /// Debug impl elsewhere in this crate).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Fix: previously printed the stale name "SenderInput", which made
        // debug/log output misleading about the concrete type.
        f.debug_struct("QueueSender").finish()
    }
}
fn system_name(opts: &SenderInputOpts) -> &'static str {
match opts {
SenderInputOpts::GCPPubSub(_) => "gcp-pubsub",
SenderInputOpts::RabbitMQ(_) => "rabbitmq",
SenderInputOpts::Redis(_) => "redis",
SenderInputOpts::SQS(_) => "sqs",
}
}
fn source_name(opts: &SenderInputOpts) -> &str {
match opts {
SenderInputOpts::GCPPubSub(opts) => &opts.subscription_id,
SenderInputOpts::RabbitMQ(opts) => &opts.queue_name,
SenderInputOpts::Redis(opts) => &opts.queue_key,
SenderInputOpts::SQS(opts) => &opts.queue_dsn,
}
}
impl QueueSender {
    /// Build a sender from its parsed config.
    ///
    /// The `source`/`system` labels are derived from the input options up
    /// front so they can be reported later without re-matching on the enum.
    pub fn new(
        name: String,
        input: SenderInputOpts,
        transformation: Option<TransformationConfig>,
        output: SenderOutputOpts,
    ) -> Self {
        Self {
            name,
            source: source_name(&input).into(),
            system: system_name(&input).into(),
            input_opts: input,
            transformation,
            transformer_tx: None,
            // Svix is currently the only supported sender output.
            svix_client: match output {
                SenderOutputOpts::Svix(output) => {
                    Svix::new(output.token, output.options.map(Into::into))
                }
            },
        }
    }
}
#[async_trait]
impl Consumer for QueueSender {
    /// Backend-specific identifier of the message source (set in `new`).
    fn source(&self) -> &str {
        &self.source
    }
    /// Queue backend kind label, e.g. "sqs" (set in `new`).
    fn system(&self) -> &str {
        &self.system
    }
    fn transformer_tx(&self) -> Option<&TransformerTx> {
        self.transformer_tx.as_ref()
    }
    fn transformation(&self) -> Option<&TransformationConfig> {
        self.transformation.as_ref()
    }
    fn svix_client(&self) -> &Svix {
        &self.svix_client
    }
    /// Build the omniqueue consumer matching the configured input backend.
    async fn consumer(&self) -> std::io::Result<DynConsumer> {
        Ok(match &self.input_opts {
            SenderInputOpts::GCPPubSub(cfg) => gcp_pubsub::consumer(cfg).await,
            SenderInputOpts::RabbitMQ(cfg) => rabbitmq::consumer(cfg).await,
            SenderInputOpts::Redis(cfg) => crate::redis::consumer(cfg).await,
            SenderInputOpts::SQS(cfg) => sqs::consumer(cfg).await,
        }
        // Map the queue error into the crate error; `?` then converts it to
        // `std::io::Error` for the trait signature.
        .map_err(Error::from)?)
    }
}
#[async_trait]
impl SenderInput for QueueSender {
    fn name(&self) -> &str {
        &self.name
    }
    /// Receive the channel to the JS transformation executor (or clear it).
    fn set_transformer(&mut self, tx: Option<TransformerTx>) {
        self.transformer_tx = tx;
    }
    /// Consume-and-forward loop; delegates to the shared `run_inner` driver.
    async fn run(&self) -> std::io::Result<()> {
        run_inner(self).await
    }
}

View File

@@ -0,0 +1,11 @@
<?php
/**
 * Directory-access guard: disable all caching of this response and
 * bounce any direct request for this directory back to its parent.
 */
$noCacheHeaders = [
    'Expires: Mon, 26 Jul 1997 05:00:00 GMT',
    'Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT',
    'Cache-Control: no-store, no-cache, must-revalidate',
];
foreach ($noCacheHeaders as $noCacheHeader) {
    header($noCacheHeader);
}
// Appended (not replacing) the previous Cache-Control header.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,40 @@
use omniqueue::{backends, DynConsumer, DynProducer};
use serde::Deserialize;
use crate::error::{Error, Result};
/// Config for an SQS consumer (sender input).
#[derive(Debug, Default, Deserialize)]
pub struct SqsInputOpts {
    /// Queue DSN/URL, passed straight through to omniqueue.
    pub queue_dsn: String,
    // NOTE(review): presumably directs the client at a custom endpoint
    // (e.g. a local emulator) — confirm against omniqueue's SqsConfig docs.
    #[serde(default)]
    pub override_endpoint: bool,
}
/// Config for an SQS producer (receiver output).
#[derive(Clone, Debug, Deserialize)]
pub struct SqsOutputOpts {
    /// Queue DSN/URL, passed straight through to omniqueue.
    pub queue_dsn: String,
    // NOTE(review): presumably directs the client at a custom endpoint
    // (e.g. a local emulator) — confirm against omniqueue's SqsConfig docs.
    #[serde(default)]
    pub override_endpoint: bool,
}
/// Build a dynamic omniqueue consumer backed by SQS.
pub async fn consumer(cfg: &SqsInputOpts) -> Result<DynConsumer> {
    let config = backends::SqsConfig {
        queue_dsn: cfg.queue_dsn.clone(),
        override_endpoint: cfg.override_endpoint,
    };
    backends::SqsBackend::builder(config)
        .make_dynamic()
        .build_consumer()
        .await
        .map_err(Error::from)
}
/// Build a dynamic omniqueue producer backed by SQS.
pub async fn producer(cfg: &SqsOutputOpts) -> Result<DynProducer> {
    let config = backends::SqsConfig {
        queue_dsn: cfg.queue_dsn.clone(),
        override_endpoint: cfg.override_endpoint,
    };
    backends::SqsBackend::builder(config)
        .make_dynamic()
        .build_producer()
        .await
        .map_err(Error::from)
}

View File

@@ -0,0 +1,13 @@
[package]
name = "svix-bridge-types"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-trait = "0.1"
tokio = { version = "1", features = ["full"] }
serde_json = "1"
serde = { version = "1", features = ["derive"] }
svix = "1.17.0"

View File

@@ -0,0 +1,20 @@
Copyright (c) 2021-2023 Svix Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,11 @@
<?php
/**
 * Directory-access guard: disable all caching of this response and
 * bounce any direct request for this directory back to its parent.
 */
$noCacheHeaders = [
    'Expires: Mon, 26 Jul 1997 05:00:00 GMT',
    'Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT',
    'Cache-Control: no-store, no-cache, must-revalidate',
];
foreach ($noCacheHeaders as $noCacheHeader) {
    header($noCacheHeader);
}
// Appended (not replacing) the previous Cache-Control header.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,11 @@
<?php
/**
 * Directory-access guard: disable all caching of this response and
 * bounce any direct request for this directory back to its parent.
 */
$noCacheHeaders = [
    'Expires: Mon, 26 Jul 1997 05:00:00 GMT',
    'Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT',
    'Cache-Control: no-store, no-cache, must-revalidate',
];
foreach ($noCacheHeaders as $noCacheHeader) {
    header($noCacheHeader);
}
// Appended (not replacing) the previous Cache-Control header.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,252 @@
pub use async_trait::async_trait;
use serde::{Deserialize, Serialize};
pub use svix;
use svix::api::{MessageIn, PostOptions as PostOptions_, SvixOptions as _SvixOptions};
use tokio::sync::{mpsc, oneshot};
/// How the raw payload is handed to a transformation script.
#[derive(Deserialize, Default, Eq, PartialEq, Copy, Clone)]
#[serde(rename_all = "lowercase")]
pub enum TransformerInputFormat {
    /// Pass the payload through as a raw string.
    String,
    /// Parse the payload as JSON before invoking the script (the default).
    #[default]
    Json,
}
/// User-supplied JS transformation, declared either as a bare string or
/// with an explicit input format.
#[derive(Deserialize, Clone)]
#[serde(untagged)]
pub enum TransformationConfig {
    /// If the config has a string value, we assume it expects the input parsed as json
    /// ```yaml
    /// transformation: function handler(x) {return { payload: x.foobar }; }
    /// ```
    ImplicitJson(String),
    /// When the config has format/src fields, then you can optionally set the format to `string`,
    /// in which case you have to parse it yourself inside the transformation.
    /// ```yaml
    /// transformation:
    ///   format: string
    ///   src: function handler(x) { return { payload: JSON.parse(x).foobar }; }
    /// ```
    Explicit {
        format: TransformerInputFormat,
        src: String,
    },
}
impl TransformationConfig {
    /// The transformation's JS source, regardless of how it was declared.
    pub fn source(&self) -> &String {
        match self {
            Self::ImplicitJson(src) | Self::Explicit { src, .. } => src,
        }
    }

    /// The input format the script expects; implicit configs are always JSON.
    pub fn format(&self) -> TransformerInputFormat {
        match self {
            Self::ImplicitJson(_) => TransformerInputFormat::Json,
            Self::Explicit { format, .. } => *format,
        }
    }
}
// Anything string-like converts into the implicit-JSON flavor, mirroring the
// bare-string form accepted in config files.
impl<S> From<S> for TransformationConfig
where
    S: Into<String>,
{
    fn from(value: S) -> Self {
        Self::ImplicitJson(value.into())
    }
}
/// The value handed to a transformation script for execution.
#[derive(Serialize)]
#[serde(untagged)]
pub enum TransformerInput {
    /// Transformations accept arbitrary json here, not restricted to an Object type.
    /// The thing receiving the value will error if it can't marshall into a type it needs.
    JSON(serde_json::Value),
    /// Aka "raw", we take the input as a utf-8 string and the transformation does whatever it
    /// wants with it.
    String(String),
}
// Convenience: JSON values become `TransformerInput::JSON`.
impl From<serde_json::Value> for TransformerInput {
    fn from(value: serde_json::Value) -> Self {
        Self::JSON(value)
    }
}
// Convenience: owned strings become the raw `TransformerInput::String` form.
impl From<String> for TransformerInput {
    fn from(value: String) -> Self {
        Self::String(value)
    }
}
/// Plain old JSON objects are what the transformations expect to receive and produce.
pub type JsObject = serde_json::Map<String, serde_json::Value>;
/// A channel for plugins to send payloads/scripts to for execution.
// Unbounded: sends never block/await.
pub type TransformerTx = mpsc::UnboundedSender<TransformerJob>;
/// The receiver side for transformations. The JS executor reads from this.
pub type TransformerRx = mpsc::UnboundedReceiver<TransformerJob>;
/// A oneshot channel for the JS executor to "publish" return values to once complete.
// FIXME: better error type?
pub type TransformerCallbackTx = oneshot::Sender<Result<TransformerOutput, ()>>;
/// Used by the caller of the transformer to await the execution's output.
// FIXME: better error type?
pub type TransformerCallbackRx = oneshot::Receiver<Result<TransformerOutput, ()>>;
/// A transformation job sent to the JS executor.
/// Once the script has been run on the payload, the transformed payload is sent back through the
/// callback channel.
pub struct TransformerJob {
    /// Where the executor publishes the (single) result.
    pub callback_tx: TransformerCallbackTx,
    /// The payload handed to the script.
    pub input: TransformerInput,
    /// JS source of the transformation to run.
    pub script: String,
}
/// Result of executing a transformation script.
#[derive(Debug)]
pub enum TransformerOutput {
    /// A successfully transformed payload.
    // Both senders and receivers require a map type (Object) but have different requirements which
    // are best validated after the fact. For now, we validate only that we get a map type back.
    Object(JsObject),
    /// For cases where the JS script executes successfully but produces an unexpected output.
    Invalid,
}
impl TransformerJob {
    /// Create a job plus the receiver on which the caller awaits the result.
    pub fn new(script: String, input: TransformerInput) -> (Self, TransformerCallbackRx) {
        let (tx, rx) = oneshot::channel();
        let job = Self {
            callback_tx: tx,
            input,
            script,
        };
        (job, rx)
    }
}
/// Effectively a black box to the supervisor.
///
/// Plugins should run until they are done, and likely they should not be "done" until the program
/// exits.
#[async_trait]
pub trait SenderInput: Send {
    /// Identifier for this input.
    fn name(&self) -> &str;
    /// For plugins that want to run JS transformations on payloads.
    /// Giving them a sender lets them pass messages to the JS executor.
    fn set_transformer(&mut self, _tx: Option<TransformerTx>) {}
    /// Drive the input; normally runs for the life of the process.
    async fn run(&self) -> std::io::Result<()>;
}
/// Represents something we can hand a webhook payload to.
/// Aka a "forwarder."
///
/// To start, we're only using this in conjunction with an HTTP server "owned" by the bridge binary.
#[async_trait]
pub trait ReceiverOutput: Send + Sync {
    /// Identifier for this output.
    fn name(&self) -> &str;
    /// Forward one request's payload to the underlying destination.
    async fn handle(&self, request: ForwardRequest) -> std::io::Result<()>;
}
/// How incoming webhooks on a receiver are authenticated.
#[derive(Deserialize, Debug, Clone, Default)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum WebhookVerifier {
    /// Verify using a Svix endpoint secret.
    Svix {
        endpoint_secret: String,
    },
    /// No verification (the default).
    #[default]
    None,
}
/// HTTP input configuration for receivers.
#[derive(Debug, Clone, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ReceiverInputOpts {
    /// Generic webhook input with a configurable verification scheme.
    Webhook {
        path_id: String,
        #[serde(default)]
        verification: WebhookVerifier,
    },
    /// Shorthand for `Webhook` with Svix verification.
    #[serde(rename = "svix-webhook")]
    SvixWebhook {
        path_id: String,
        endpoint_secret: String,
    },
}
impl ReceiverInputOpts {
pub fn path_id(&self) -> &str {
match self {
ReceiverInputOpts::Webhook { path_id, .. }
| ReceiverInputOpts::SvixWebhook { path_id, .. } => path_id,
}
}
}
// N.b. the codegen types we get from openapi don't impl Deserialize so we need our own version.
/// Deserializable mirror of the Svix client's options.
#[derive(Debug, Default, Deserialize)]
pub struct SvixOptions {
    /// Enable the client's debug mode.
    #[serde(default)]
    pub debug: bool,
    /// Alternate Svix server URL, passed through to the client.
    pub server_url: Option<String>,
}
impl From<SvixOptions> for _SvixOptions {
    /// Field-for-field conversion into the client's own options type.
    fn from(value: SvixOptions) -> Self {
        _SvixOptions {
            debug: value.debug,
            server_url: value.server_url,
        }
    }
}
/// Where a sender delivers messages; Svix is currently the only target.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum SenderOutputOpts {
    Svix(SvixSenderOutputOpts),
}
/// Credentials and client options for a sender's Svix output.
#[derive(Debug, Deserialize)]
pub struct SvixSenderOutputOpts {
    /// Svix API token for the client.
    pub token: String,
    /// Options for the Svix client.
    #[serde(default)]
    pub options: Option<SvixOptions>,
}
/// Deserializable mirror of the Svix client's `PostOptions`.
#[derive(Clone, Default, Deserialize, Serialize)]
pub struct PostOptions {
    // Optional idempotency key forwarded with the Create Message call.
    idempotency_key: Option<String>,
}
impl From<PostOptions> for PostOptions_ {
fn from(value: PostOptions) -> Self {
PostOptions_ {
idempotency_key: value.idempotency_key,
}
}
}
/// Senders convert messages into Create Message API calls so the JSON pulled out of message queues
/// or produced by transformations need to conform to this shape.
#[derive(Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateMessageRequest {
    /// The Svix application to create the message under.
    pub app_id: String,
    /// The message body handed to the Svix API.
    pub message: MessageIn,
    /// Optional extras (e.g. idempotency key) for the API call.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub post_options: Option<PostOptions>,
}
/// Receivers convert HTTP bodies into messages forwarded to (currently only) message queues, etc.
/// The `payload` field represents the message body given to the producer, and other fields may be
/// added in the future allowing transformations to dynamically customize the producer behavior.
// NOTE(review): both Serialize and Deserialize are derived — presumably so the
// value can round-trip through the JS transformation layer; confirm.
#[derive(Clone, Deserialize, Serialize)]
pub struct ForwardRequest {
    /// This is the payload that will be fed into a Receiver Output
    // XXX: right now I think any arbitrary json value can work, but individual outputs may have
    // more strict requirements.
    // The fact this is represented as a field on a json object demands at least that the value can
    // be represented in json.
    // FIXME: can we leverage RawValue here?
    pub payload: serde_json::Value,
}

View File

@@ -0,0 +1,111 @@
# Svix Bridge Example Configuration
# Set the log level for the service. Supported: error, info, warn, debug, trace. Default: info
#log_level: "debug"
# The log format that all output will follow. Supported: default, json
#log_format: "json"
# Optional: configures an OTEL exporter forwarding spans to the specified collector
opentelemetry:
address: "http://localhost:1234"
sample_ratio: 0.5
# Optional: default "svix-bridge"
service_name: "my-bridge"
# The host/port to bind to for incoming HTTP requests.
# Optional: default "0.0.0.0:5000"
# http_listen_address: "0.0.0.0:5000"
# Receivers are HTTP endpoints that can have webhooks sent to them.
# When a webhook is POST'ed to a matching URL, it is (optionally) verified,
# (optionally) transformed via a js function, then forwarded to an "output."
#
# Input types are "webhook" which allows you to configure a verification scheme
# (either "svix" or "none") or "svix-webhook" which is a shorthand version.
#
# ```
# input:
# type: "webhook"
# path_id: "long-hand"
# verification:
# type: "svix"
# endpoint_secret: "whsec_XXXXX="
# # same as...
# input:
# type: "svix-webhook"
# path_id: "shorthand"
# endpoint_secret: "whsec_XXXXX="
# ```
#
# The `path_id` in webhook and svix-webhook inputs represents the trailing
# path segment that will connect to the given output.
# For example, running bridge with the HTTP listen address set to
# `localhost:5000`, the above examples would map to the following URLs:
# - http://localhost:5000/webhook/long-hand
# - http://localhost:5000/webhook/shorthand
#
receivers:
- name: "forward-to-gcp-example"
input:
type: "webhook"
path_id: "gcp"
verification:
type: "svix"
endpoint_secret: "whsec_XXXXX="
# Optional - when unset, webhooks received will be forwarded to the output as-is.
transformation: |
function handler(input) {
let event_type = input.eventType;
delete input.eventType;
// N.b. receiver outputs expect to find the message body to publish in the `payload` field.
return { payload: { event_type, ...input }};
}
output:
type: "gcp-pubsub"
topic: "example"
# Optional - falls back to env otherwise, eg.
# - `GOOGLE_APPLICATION_CREDENTIALS`
# - `GOOGLE_APPLICATION_CREDENTIALS_JSON`
credentials_file: "/path/to/creds.json"
- name: "forward-to-rabbitmq-example"
input:
type: "webhook"
path_id: "rabbit"
verification:
type: "svix"
endpoint_secret: "whsec_XXXXX="
output:
type: "rabbitmq"
uri: "amqp://guest:guest@localhost:5672/%2f"
exchange: ""
routing_key: "example"
- name: "forward-to-redis-example"
input:
type: "webhook"
path_id: "redis"
verification:
type: "svix"
endpoint_secret: "whsec_XXXXX="
output:
type: "redis"
dsn: "redis://localhost:1234"
max_connections: 4
queue_key: "my_queue"
- name: "forward-to-sqs-example"
input:
type: "webhook"
path_id: "sqs"
verification:
type: "none"
output:
# Note that the SQS forwarder requires credentials to be set as environment vars:
# - `AWS_DEFAULT_REGION`
# - `AWS_ACCESS_KEY_ID`
# - `AWS_SECRET_ACCESS_KEY`
type: "sqs"
queue_dsn: "https://aws.example.com/my-queue"

View File

@@ -0,0 +1,137 @@
# Svix Bridge Example Configuration
# Set the log level for the service. Supported: error, info, warn, debug, trace. Default: info
#log_level: "debug"
# The log format that all output will follow. Supported: default, json
#log_format: "json"
# Optional: configures an OTEL exporter forwarding spans to the specified collector
opentelemetry:
address: "http://localhost:1234"
sample_ratio: 0.5
# Optional: default "svix-bridge"
service_name: "my-bridge"
# The host/port to bind to for incoming HTTP requests.
# Optional: default "0.0.0.0:5000"
# http_listen_address: "0.0.0.0:5000"
# Senders consume JSON from their input, optionally transform, then forward to Svix.
# Currently supported inputs are various message queue consumers while the sole
# output is "svix" (which does a Create Message API request)
senders:
# GCP Pub/Sub Consumer
- name: "gcp-example"
input:
type: "gcp-pubsub"
subscription_id: "my-subscription"
# Optional - will fallback to looking at env vars when left unset.
credentials_file: "/path/to/credentials.json"
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation:
format: "json"
src: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"
# RabbitMQ Consumer
- name: "rabbitmq-example"
input:
type: "rabbitmq"
# Required
uri: "amqp://guest:guest@localhost:5672/%2f"
# Required
queue_name: "my-queue"
# Optional (default: unset, managed by rabbitmq)
consumer_tag: "my-consumer-001"
# Optional: default true
requeue_on_nack: false
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"
# Redis Consumer
- name: "redis-example"
input:
type: "redis"
# Required
dsn: "redis://localhost:6379/"
# Required
queue_key: "my_queue"
# Required
consumer_name: "my_consumer"
# Required
consumer_group: "my_group"
# Required
max_connections: 4
# Optional: default true
reinsert_on_nack: true
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"
# SQS Consumer
# Also remember to set your AWS credentials in env vars to use this:
# - `AWS_DEFAULT_REGION`
# - `AWS_ACCESS_KEY_ID`
# - `AWS_SECRET_ACCESS_KEY`
- name: "sqs-example"
input:
type: "sqs"
# Required
queue_dsn: "http://localhost:19324/000000000000/local"
# Optional (default: false)
override_endpoint: true
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"

View File

@@ -0,0 +1,51 @@
[package]
name = "svix-bridge"
version = "1.24.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1"
clap = { version = "4.2.4", features = ["env", "derive"] }
axum = { version = "0.6", features = ["macros"] }
enum_dispatch = "0.3"
http = "0.2"
once_cell = "1.18.0"
opentelemetry = "0.22.0"
opentelemetry_sdk = { version = "0.22.1", features = ["metrics", "rt-tokio"] }
opentelemetry-otlp = { version = "0.15.0", features = ["metrics", "grpc-tonic", "http-proto", "reqwest-client"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_yaml = "0.9"
svix-ksuid = "0.7.0"
svix-bridge-plugin-queue = { optional=true, path = "../svix-bridge-plugin-queue" }
svix-bridge-types = { path = "../svix-bridge-types" }
tokio = { version = "1", features = ["full"] }
tracing = "0.1"
tracing-opentelemetry = "0.23.0"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt", "json"] }
# N.b. for newer deno versions (like this) the runtimes must be retained and reused since they will leak memory if you
# create/drop them.
deno_runtime = "0.125.0"
deno_ast = "0.28.0"
deadpool = { version = "0.9.5", features = ["unmanaged", "rt_tokio_1"] }
shellexpand = { version = "3.1.0", default-features = false, features = ["base-0"] }
[target.'cfg(not(target_env = "msvc"))'.dependencies]
tikv-jemallocator = { version = "0.5", optional = true }
tikv-jemalloc-ctl = { version = "0.5", optional = true, features = ["use_std"] }
[dev-dependencies]
chrono = "0.4"
tower = "0.4"
[features]
default = ["gcp-pubsub", "rabbitmq", "redis", "sqs", "jemalloc"]
gcp-pubsub = ["generic-queue"]
generic-queue = ["dep:svix-bridge-plugin-queue"]
rabbitmq = ["generic-queue"]
redis = ["generic-queue"]
sqs = ["generic-queue"]
jemalloc = ["tikv-jemallocator", "tikv-jemalloc-ctl"]

View File

@@ -0,0 +1,20 @@
Copyright (c) 2021-2023 Svix Inc.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,11 @@
<?php
/**
 * Directory-access guard: disable all caching of this response and
 * bounce any direct request for this directory back to its parent.
 */
$noCacheHeaders = [
    'Expires: Mon, 26 Jul 1997 05:00:00 GMT',
    'Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT',
    'Cache-Control: no-store, no-cache, must-revalidate',
];
foreach ($noCacheHeaders as $noCacheHeader) {
    header($noCacheHeader);
}
// Appended (not replacing) the previous Cache-Control header.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,70 @@
//! Allocator stats are only available when we're using jemalloc, and jemalloc doesn't work on windows.
//!
//! 2 impls for the helper functions are therefore provided. One set that does nothing (for windows)
//! and another that works in the non-windows world.
//!
//! Care should be taken to keep the signatures aligned between these two so the callsites can be
//! used consistently regardless of whether jemalloc is in use or not.
#[cfg(all(not(target_env = "msvc"), feature = "jemalloc"))]
pub use supported::*;
#[cfg(any(target_env = "msvc", not(feature = "jemalloc")))]
pub use unsupported::*;
#[cfg(all(not(target_env = "msvc"), feature = "jemalloc"))]
mod supported {
    use std::sync::Arc;
    use tikv_jemalloc_ctl::{epoch, stats};
    /// Pre-resolved jemalloc MIB keys so repeated stat reads avoid
    /// string-based lookups.
    pub struct AllocatorStatMibs {
        epoch: tikv_jemalloc_ctl::epoch_mib,
        allocated: stats::allocated_mib,
        resident: stats::resident_mib,
    }
    /// Read `(allocated, resident)` byte counts from jemalloc.
    ///
    /// With `bust_cache` set, the stats epoch is advanced first so fresh
    /// values are read instead of jemalloc's cached copies.
    pub fn get_allocator_stats(
        bust_cache: bool,
        mibs: Arc<AllocatorStatMibs>,
    ) -> anyhow::Result<Option<(usize, usize)>> {
        if bust_cache {
            // Stats are cached internally and advancing the epoch is a way to invalidate those caches.
            mibs.epoch.advance()?;
        }
        let allocated = mibs.allocated.read()?;
        let resident = mibs.resident.read()?;
        Ok(Some((allocated, resident)))
    }
    /// Resolve the MIB keys once; callers share the result via `Arc`.
    pub fn get_allocator_stat_mibs() -> anyhow::Result<Arc<AllocatorStatMibs>> {
        let e = epoch::mib()?;
        let allocated = stats::allocated::mib()?;
        let resident = stats::resident::mib()?;
        Ok(Arc::new(AllocatorStatMibs {
            epoch: e,
            allocated,
            resident,
        }))
    }
}
#[cfg(any(target_env = "msvc", not(feature = "jemalloc")))]
mod unsupported {
    use std::sync::Arc;
    use anyhow::anyhow;
    /// Empty placeholder so call sites are identical across platforms.
    pub struct AllocatorStatMibs;
    /// Allocator stats are unavailable here; always reports `None`.
    pub fn get_allocator_stats(
        _bust_cache: bool,
        _mibs: Arc<AllocatorStatMibs>,
    ) -> anyhow::Result<Option<(usize, usize)>> {
        Ok(None)
    }
    /// Always errors: there are no MIBs to resolve without jemalloc.
    pub fn get_allocator_stat_mibs() -> anyhow::Result<Arc<AllocatorStatMibs>> {
        Err(anyhow!("metric collection is not supported"))
    }
}

View File

@@ -0,0 +1,11 @@
<?php
/**
 * Directory-access guard: disable all caching of this response and
 * bounce any direct request for this directory back to its parent.
 */
$noCacheHeaders = [
    'Expires: Mon, 26 Jul 1997 05:00:00 GMT',
    'Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT',
    'Cache-Control: no-store, no-cache, must-revalidate',
];
foreach ($noCacheHeaders as $noCacheHeader) {
    header($noCacheHeader);
}
// Appended (not replacing) the previous Cache-Control header.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,218 @@
use std::{
borrow::Cow,
collections::HashMap,
convert::Infallible,
fmt,
io::{Error, ErrorKind},
net::SocketAddr,
num::NonZeroUsize,
};
use serde::Deserialize;
use shellexpand::LookupError;
use svix_bridge_plugin_queue::config::{
into_receiver_output, QueueConsumerConfig, ReceiverOutputOpts as QueueOutOpts,
};
use svix_bridge_types::{ReceiverInputOpts, ReceiverOutput, SenderInput, TransformationConfig};
use tracing::Level;
/// Top-level service configuration, deserialized from YAML.
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Config {
    /// Queue consumers that forward messages to Svix.
    #[serde(default)]
    pub senders: Vec<SenderConfig>,
    /// HTTP endpoints that forward incoming webhooks onward.
    #[serde(default)]
    pub receivers: Vec<ReceiverConfig>,
    /// The log level to run the service with. Supported: info, debug, trace
    #[serde(default)]
    pub log_level: LogLevel,
    /// The log format that all output will follow. Supported: default, json
    #[serde(default)]
    pub log_format: LogFormat,
    /// OpenTelemetry exporter settings
    #[serde(default)]
    pub opentelemetry: Option<OtelExporterConfig>,
    /// Bind address for the HTTP server; defaults to 0.0.0.0:5000.
    #[serde(default = "default_http_listen_address")]
    pub http_listen_address: SocketAddr,
    /// Size of the JS transformation worker pool; defaults to 4.
    #[serde(default = "default_transformation_worker_count")]
    pub transformation_worker_count: NonZeroUsize,
}
impl Config {
    /// Build a Config from yaml source.
    /// Optionally accepts a map to perform variable substitution with.
    ///
    /// After parsing, every sender/receiver transformation script is
    /// validated via the JS runtime so bad scripts fail at startup rather
    /// than at message time. All failures surface as `std::io::Error`.
    pub fn from_src(
        raw_src: &str,
        vars: Option<&HashMap<String, String>>,
    ) -> std::io::Result<Self> {
        let src = if let Some(vars) = vars {
            // Variables are looked up in the provided map only, not the
            // process environment.
            let context = |key: &str| -> Result<Option<Cow<'_, str>>, LookupError<Infallible>> {
                Ok(vars.get(key).map(Cow::from))
            };
            shellexpand::env_with_context(raw_src, context).map_err(|e: LookupError<_>| {
                Error::new(
                    ErrorKind::Other,
                    format!("Variable substitution failed: {e}"),
                )
            })?
        } else {
            // No vars given: parse the source as-is, without copying.
            Cow::Borrowed(raw_src)
        };
        let cfg: Self = serde_yaml::from_str(&src)
            .map_err(|e| Error::new(ErrorKind::Other, format!("Failed to parse config: {}", e)))?;
        // Validate sender transformations up front.
        for sc in &cfg.senders {
            if let Some(tc) = sc.transformation() {
                crate::runtime::validate_script(tc.source().as_str()).map_err(|e| {
                    Error::new(
                        ErrorKind::Other,
                        format!(
                            "failed to parse transformation for sender `{}`: {:?}",
                            &sc.name(),
                            e,
                        ),
                    )
                })?;
            }
        }
        // Validate receiver transformations up front.
        for rc in &cfg.receivers {
            if let Some(tc) = &rc.transformation {
                crate::runtime::validate_script(tc.source().as_str()).map_err(|e| {
                    Error::new(
                        ErrorKind::Other,
                        format!(
                            "failed to parse transformation for receiver `{}`: {:?}",
                            &rc.name, e,
                        ),
                    )
                })?;
            }
        }
        Ok(cfg)
    }
}
/// Fallback bind address used when `http_listen_address` is absent.
fn default_http_listen_address() -> SocketAddr {
    SocketAddr::from(([0, 0, 0, 0], 5000))
}
/// Fallback size of the JS transformation worker pool.
fn default_transformation_worker_count() -> NonZeroUsize {
    // MIN is 1; saturating_add(3) yields the default of 4 with no unwrap.
    NonZeroUsize::MIN.saturating_add(3)
}
/// Settings for the optional OpenTelemetry exporter.
#[derive(Deserialize)]
pub struct OtelExporterConfig {
    /// The OpenTelemetry service name to use
    pub service_name: Option<String>,
    /// The OpenTelemetry address to send events to if given.
    pub address: String,
    /// The ratio at which to sample spans when sending to OpenTelemetry. When not given it defaults
    /// to always sending. If the OpenTelemetry address is not set, this will do nothing.
    pub sample_ratio: Option<f64>,
}
/// Log verbosity accepted in config (lowercase in YAML); defaults to Info.
#[derive(Clone, Debug, Default, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LogLevel {
    #[default]
    Info,
    Debug,
    Trace,
}
impl fmt::Display for LogLevel {
    /// Render via the matching `tracing::Level`, so the textual form stays
    /// consistent with the tracing crate's own formatting.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let level = match self {
            Self::Info => Level::INFO,
            Self::Debug => Level::DEBUG,
            Self::Trace => Level::TRACE,
        };
        level.fmt(f)
    }
}
/// Log output format accepted in config (lowercase in YAML).
#[derive(Clone, Debug, Default, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LogFormat {
    #[default]
    Default,
    Json,
}
/// Union of supported sender flavors; currently only queue consumers.
#[derive(Deserialize)]
#[serde(untagged)]
pub enum SenderConfig {
    // Only compiled in when at least one queue backend feature is enabled.
    #[cfg(any(
        feature = "gcp-pubsub",
        feature = "rabbitmq",
        feature = "redis",
        feature = "sqs"
    ))]
    QueueConsumer(QueueConsumerConfig),
}
impl SenderConfig {
    /// The user-assigned name for this sender (used in error messages).
    pub fn name(&self) -> &str {
        match self {
            SenderConfig::QueueConsumer(cfg) => &cfg.name,
        }
    }
    /// The optional JS transformation attached to this sender.
    pub fn transformation(&self) -> Option<&TransformationConfig> {
        match self {
            SenderConfig::QueueConsumer(cfg) => cfg.transformation.as_ref(),
        }
    }
}
// Turn parsed sender config into a runnable input; delegates to the queue
// plugin to construct the concrete backend.
impl TryFrom<SenderConfig> for Box<dyn SenderInput> {
    type Error = &'static str;
    fn try_from(value: SenderConfig) -> Result<Self, Self::Error> {
        match value {
            #[cfg(any(
                feature = "gcp-pubsub",
                feature = "rabbitmq",
                feature = "redis",
                feature = "sqs"
            ))]
            SenderConfig::QueueConsumer(backend) => backend.into_sender_input(),
        }
    }
}
/// One receiver entry from config: HTTP input, optional transformation,
/// and an output to forward to.
#[derive(Deserialize)]
pub struct ReceiverConfig {
    pub name: String,
    pub input: ReceiverInputOpts,
    /// Optional JS transformation applied before forwarding.
    #[serde(default)]
    pub transformation: Option<TransformationConfig>,
    pub output: ReceiverOut,
}
/// Union of supported receiver outputs; currently only queue producers.
#[derive(Deserialize)]
#[serde(untagged)]
pub enum ReceiverOut {
    // Only compiled in when at least one queue backend feature is enabled.
    #[cfg(any(
        feature = "gcp-pubsub",
        feature = "rabbitmq",
        feature = "redis",
        feature = "sqs"
    ))]
    QueueProducer(QueueOutOpts),
}
impl ReceiverConfig {
    /// Build the concrete output this receiver forwards to, wiring in its
    /// optional transformation.
    pub async fn into_receiver_output(self) -> std::io::Result<Box<dyn ReceiverOutput>> {
        match self.output {
            ReceiverOut::QueueProducer(x) => {
                into_receiver_output(self.name.clone(), x, self.transformation.as_ref())
                    .await
                    .map_err(Into::into)
            }
        }
    }
}
#[cfg(test)]
mod tests;

View File

@@ -0,0 +1,568 @@
use std::collections::HashMap;
use svix_bridge_plugin_queue::config::{QueueConsumerConfig, RabbitMqInputOpts, SenderInputOpts};
use svix_bridge_types::{SenderOutputOpts, SvixSenderOutputOpts};
use super::Config;
use crate::config::{LogFormat, LogLevel, SenderConfig};
/// This is meant to be a kitchen sink config, hitting as many possible
/// configuration options as possible to ensure they parse correctly.
// FIXME: today, largely based on the examples. Should instead focus on coverage.
// NOTE(review): this YAML appears whitespace-stripped in this view; the original
// file presumably carries proper nesting indentation — verify before editing.
const OMNIBUS: &str = r#"
# Svix Bridge Example Configuration
# Set the log level for the service. Supported: error, info, warn, debug, trace. Default: info
#log_level: "debug"
# The log format that all output will follow. Supported: default, json
#log_format: "json"
# Optional: configures an OTEL exporter forwarding spans to the specified collector
opentelemetry:
address: "http://localhost:1234"
sample_ratio: 0.5
# Optional: default "svix-bridge"
service_name: "my-bridge"
# The host/port to bind to for incoming HTTP requests.
# Optional: default 0.0.0.0:5000
# http_listen_address: "0.0.0.0:5000"
# Senders consume JSON from their input, optionally transform, then forward to Svix.
# Currently supported inputs are various message queue consumers while the sole
# output is "svix" (which does a Create Message API request)
senders:
# GCP Pub/Sub Consumer
- name: "gcp-example"
input:
type: "gcp-pubsub"
subscription_id: "my-subscription"
# Optional - will fallback to looking at env vars when left unset.
credentials_file: "/path/to/credentials.json"
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"
# RabbitMQ Consumer
- name: "rabbitmq-example"
input:
type: "rabbitmq"
# Required
uri: "amqp://guest:guest@localhost:5672/%2f"
# Required
queue_name: "my-queue"
# Optional (default: unset, managed by rabbitmq)
consumer_tag: "my-consumer-001"
# Optional: default true
requeue_on_nack: false
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"
# Redis Consumer
- name: "redis-example"
input:
type: "redis"
# Required
dsn: "redis://localhost:6379/"
# Required
queue_key: "my_queue"
# Required
consumer_name: "my_consumer"
# Required
consumer_group: "my_group"
# Required
max_connections: 4
# Optional: default true
reinsert_on_nack: true
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"
# SQS Consumer
# Also remember to set your AWS credentials in env vars to use this:
# - `AWS_DEFAULT_REGION`
# - `AWS_ACCESS_KEY_ID`
# - `AWS_SECRET_ACCESS_KEY`
- name: "sqs-example"
input:
type: "sqs"
# Required
queue_dsn: "http://localhost:19324/000000000000/local"
# Optional (default: false)
override_endpoint: true
# Optional - when unset, messages from the queue will be sent to Svix as-is.
transformation: |
function handler(input) {
return {
appId: input.key,
message: {
eventType: input.event_type,
payload: input.data
}
};
}
output:
type: "svix"
# Required (the Svix token to use when creating messages with this consumer)
token: "XYZ"
# Receivers are HTTP endpoints that can have webhooks sent to them.
# When a webhook is POST'ed to a matching URL, it is (optionally) verified,
# (optionally) transformed via a js function, then forwarded to an "output."
#
# Inputs types are "webhook" which allows you to configure a verification scheme
# (either "svix" or "none") or "svix-webhook" which is a shorthand version.
#
# ```
# input:
# type: "webhook"
# path_id: "long-hand"
# verification:
# type: "svix"
# endpoint_secret: "whsec_XXXXX="
# # same as...
# input:
# type: "svix-webhook"
# path_id: "shorthand"
# endpoint_secret: "whsec_XXXXX="
# ```
#
# The `path_id` in webhook and svix-webhook inputs represents the trailing
# path segment that will connect to the given output.
# For example, running bridge with the HTTP listen address set to
# `localhost:5000`, the above examples would map to the following URLS:
# - http://localhost:5000/webhooks/long-hand
# - http://localhost:5000/webhooks/shorthand
#
receivers:
- name: "forward-to-gcp-example"
input:
type: "webhook"
path_id: "gcp"
verification:
type: "svix"
endpoint_secret: "whsec_XXXXX="
# Optional - when unset, webhooks received will be forwarded to the output as-is.
transformation: |
function handler(input) {
let event_type = input.eventType;
delete input.eventType;
return { event_type, ...input };
}
output:
type: "gcp-pubsub"
topic: "example"
# Optional - falls back to env otherwise, eg.
# - `GOOGLE_APPLICATION_CREDENTIALS`
# - `GOOGLE_APPLICATION_CREDENTIALS_JSON`
credentials_file: "/path/to/creds.json"
- name: "forward-to-rabbitmq-example"
input:
type: "webhook"
path_id: "rabbit"
verification:
type: "svix"
endpoint_secret: "whsec_XXXXX="
output:
type: "rabbitmq"
uri: "amqp://guest:guest@localhost:5672/%2f"
exchange: ""
routing_key: "example"
- name: "forward-to-redis-example"
input:
type: "webhook"
path_id: "redis"
verification:
type: "svix"
endpoint_secret: "whsec_XXXXX="
output:
type: "redis"
dsn: "redis://localhost:1234"
max_connections: 4
queue_key: "my_queue"
- name: "forward-to-sqs-example"
input:
type: "webhook"
path_id: "sqs"
verification:
type: "none"
output:
# Note that the SQS forwarder requires credentials to be set as environment vars:
# - `AWS_DEFAULT_REGION`
# - `AWS_ACCESS_KEY_ID`
# - `AWS_SECRET_ACCESS_KEY`
type: "sqs"
queue_dsn: "https://example.aws.com/my-queue"
"#;
#[test]
fn test_sender_parses_ok() {
    // A single sender document should deserialize without error.
    serde_yaml::from_str::<SenderConfig>(
        r#"
name: "from-rabbit-local-to-svix"
input:
type: "rabbitmq"
queue_name: "local"
uri: "amqp://example.com/%2f"
transformation: |
handler = (x) => ({ appId: "app_1234", message: { eventType: "foo.bar", payload: x }})
output:
type: "svix"
token: "XXXX"
"#,
    )
    .unwrap();
}
#[test]
fn test_senders_parses_ok() {
    // A list of senders, exercising both implicit and explicit transformation forms.
    let senders = serde_yaml::from_str::<Vec<SenderConfig>>(
        r#"
- name: "from-rabbit-local-to-svix"
input:
type: "rabbitmq"
queue_name: "local"
uri: "amqp://example.com/%2f"
# Implicit json transformation
transformation: |
handler = (x) => ({ appId: "app_1234", message: { eventType: "foo.bar", payload: x }})
output:
type: "svix"
token: "XXXX"
- name: "from-SQS-to-svix"
input:
type: "sqs"
queue_dsn: "http://sqs.example.com/foo/bar"
# Explicit string transformation
transformation:
format: string
src: |
function handler(x) {
return { appId: "app_1234", message: { eventType: "foo.bar", payload: x }}
}
output:
type: "svix"
token: "YYYY"
"#,
    )
    .unwrap();
    assert_eq!(senders.len(), 2);
}
#[test]
fn test_omnibus_parses_ok() {
    // The kitchen-sink config must deserialize cleanly.
    serde_yaml::from_str::<Config>(OMNIBUS).unwrap();
}
#[test]
fn test_empty() {
    // An empty document must produce the documented defaults everywhere.
    let cfg: Config = serde_yaml::from_str("").unwrap();
    assert!(matches!(cfg.log_level, LogLevel::Info));
    assert!(matches!(cfg.log_format, LogFormat::Default));
    assert_eq!(cfg.http_listen_address, "0.0.0.0:5000".parse().unwrap());
    assert!(cfg.opentelemetry.is_none());
    assert!(cfg.senders.is_empty());
    assert!(cfg.receivers.is_empty());
}
/// Don't particularly care about the parsed specifics here.
/// This is more about making sure the examples we have in the repo actually parse.
#[test]
fn test_receivers_example() {
    let path = concat!(
        env!("CARGO_MANIFEST_DIR"),
        "/../svix-bridge.example.receivers.yaml"
    );
    let raw = std::fs::read(path).unwrap();
    let cfg: Config = serde_yaml::from_slice(&raw).unwrap();
    assert!(cfg.senders.is_empty());
    assert!(!cfg.receivers.is_empty());
}
/// Don't particularly care about the parsed specifics here.
/// This is more about making sure the examples we have in the repo actually parse.
#[test]
fn test_senders_example() {
    let path = concat!(
        env!("CARGO_MANIFEST_DIR"),
        "/../svix-bridge.example.senders.yaml"
    );
    let raw = std::fs::read(path).unwrap();
    let cfg: Config = serde_yaml::from_slice(&raw).unwrap();
    assert!(!cfg.senders.is_empty());
    assert!(cfg.receivers.is_empty());
}
#[test]
fn test_variable_substitution_missing_vars() {
    let src = r#"
opentelemetry:
address: "${OTEL_ADDR}"
"#;
    // Substitute against an intentionally empty variable map.
    let no_vars = HashMap::new();
    let cfg = Config::from_src(src, Some(&no_vars)).unwrap();
    let otel = cfg.opentelemetry.unwrap();
    // when lookups in the vars map fail, the original token text is preserved.
    assert_eq!(&otel.address, "${OTEL_ADDR}");
}
#[test]
fn test_variable_substitution_available_vars() {
    let src = r#"
opentelemetry:
address: "${OTEL_ADDR}"
sample_ratio: ${OTEL_SAMPLE_RATIO}
"#;
    let vars = HashMap::from([
        (
            String::from("OTEL_ADDR"),
            String::from("http://127.0.0.1:8080"),
        ),
        (String::from("OTEL_SAMPLE_RATIO"), String::from("0.25")),
    ]);
    let cfg = Config::from_src(src, Some(&vars)).unwrap();
    // when lookups succeed, the token should be replaced.
    let otel = cfg.opentelemetry.unwrap();
    assert_eq!(&otel.address, "http://127.0.0.1:8080");
    assert_eq!(otel.sample_ratio, Some(0.25));
}
#[test]
fn test_variable_substitution_braces_optional() {
    let src = r#"
opentelemetry:
# Formerly failing to use ${} notation means the port number would not be substituted.
# Today, it works. Test that it continues to.
address: "${OTEL_SCHEME}://${OTEL_HOST}:$OTEL_PORT"
"#;
    let vars = HashMap::from([
        (String::from("OTEL_SCHEME"), String::from("https")),
        (String::from("OTEL_HOST"), String::from("127.0.0.1")),
        (String::from("OTEL_PORT"), String::from("9999")),
    ]);
    let cfg = Config::from_src(src, Some(&vars)).unwrap();
    let otel = cfg.opentelemetry.unwrap();
    // Not the user-intended outcome, but it simplifies the parsing requirements.
    assert_eq!(&otel.address, "https://127.0.0.1:9999");
}
#[test]
fn test_variable_substitution_missing_numeric_var_is_err() {
    // Unfortunate side-effect of templating yaml.
    //
    // If the variable is missing, usually you've got three options:
    // - retain the token text that failed the lookup (envsubst-rs does this)
    // - replace the token with an empty string (the CLI `envsubst` does this)
    // - mark it an error (neither do this, but we can if we roll our own impl)
    //
    // For yaml, the field typings are heavily/poorly inferred so for an optional float like
    // `sample_ratio` an empty string would parse as a `None`, which could be a bad fallback since
    // otel considers this a 1.0 ratio (send everything).
    //
    // For this specific case, retaining the token text produces an error, which happens to be useful.
    // For fields that happen to be strings anyway, errors may show up later (after the config parsing).
    // Ex: using `${QUEUE_NAME}` in a rabbit sender input will surface in logs as an error when we
    // try to connect: "no such queue '${QUEUE_NAME}'".
    let src = r#"
opentelemetry:
address: "${OTEL_ADDR}"
# This var will be missing, causing the template token to
# be retained causing a parse failure :(
sample_ratio: ${OTEL_SAMPLE_RATIO}
"#;
    let no_vars = HashMap::new();
    let err = Config::from_src(src, Some(&no_vars)).err().unwrap();
    let want = "Failed to parse config: opentelemetry.sample_ratio: invalid type: string \"${OTEL_SAMPLE_RATIO}\", expected f64 at line 6 column 23";
    assert_eq!(want, err.to_string());
}
/// This is probably a given, but we should expect a single variable can be referenced multiple
/// times within the config.
/// The concrete use case: auth tokens.
#[test]
fn test_variable_substitution_repeated_lookups() {
    let src = r#"
senders:
- name: "rabbitmq-1"
input:
type: "rabbitmq"
uri: "${RABBIT_URI}"
queue_name: "${QUEUE_NAME_1}"
output:
type: "svix"
token: "${SVIX_TOKEN}"
- name: "rabbitmq-2"
input:
type: "rabbitmq"
uri: "${RABBIT_URI}"
queue_name: "${QUEUE_NAME_2}"
output:
type: "svix"
token: "${SVIX_TOKEN}"
"#;
    let mut vars = HashMap::new();
    vars.insert(
        String::from("RABBIT_URI"),
        String::from("amqp://guest:guest@localhost:5672/%2f"),
    );
    vars.insert(String::from("QUEUE_NAME_1"), String::from("one"));
    vars.insert(String::from("QUEUE_NAME_2"), String::from("two"));
    vars.insert(String::from("SVIX_TOKEN"), String::from("x"));
    let cfg = Config::from_src(src, Some(&vars)).unwrap();
    // Both senders share ${RABBIT_URI} and ${SVIX_TOKEN}; only the queue name
    // differs. Checking in a loop removes the previously-duplicated match block.
    for (sender, want_queue) in cfg.senders.iter().zip(["one", "two"]) {
        if let SenderConfig::QueueConsumer(QueueConsumerConfig {
            input:
                SenderInputOpts::RabbitMQ(RabbitMqInputOpts {
                    uri, queue_name, ..
                }),
            output: SenderOutputOpts::Svix(SvixSenderOutputOpts { token, .. }),
            ..
        }) = sender
        {
            assert_eq!(uri, "amqp://guest:guest@localhost:5672/%2f");
            assert_eq!(queue_name, want_queue);
            assert_eq!(token, "x");
        } else {
            panic!("sender did not match expected pattern");
        }
    }
}
/// This is to ensure the order of operations.
/// Variables in js source code fragments could result in invalid JS source.
/// Now that we are validating transformations parse as JS, this test aims to make sure the
/// replacements happen before the JS parsing does.
#[test]
fn test_variable_substitution_in_transformations() {
    let src = r#"
senders:
- name: "rabbitmq-1"
input:
type: "rabbitmq"
uri: "${RABBIT_URI}"
queue_name: "${QUEUE_NAME}"
transformation: |
function handler(input) {
return {
appId: "xxx",
message: {
eventType: "queue.message.handled",
payload: {
queueName: "${QUEUE_NAME}",
// Without the substitution for NUMBER, this expression would be invalid syntax.
number: ${NUMBER} - 10,
data: input,
}
}
};
}
output:
type: "svix"
token: "${SVIX_TOKEN}"
"#;
    let mut vars = HashMap::new();
    vars.insert(String::from("NUMBER"), String::from("123"));
    vars.insert(String::from("QUEUE_NAME"), String::from("one"));
    let cfg = Config::from_src(src, Some(&vars)).unwrap();
    let xform = cfg.senders[0].transformation().unwrap();
    // BUG FIX: these `contains` results were previously discarded, so the test
    // asserted nothing. Wrap them in `assert!` so a missed substitution fails.
    assert!(xform.source().contains(r#"queueName: "one""#));
    assert!(xform.source().contains(r#"number: 123 - 10,"#));
}
/// Check that the config parser validates the JS source fragments in it.
#[test]
fn test_transformation_validation_bad_syntax_is_err() {
    let src = r#"
senders:
- name: "bad xform"
input:
type: "rabbitmq"
uri: "xxx"
queue_name: "xxx"
transformation: |
// invalid syntax
let 123 = 456
output:
type: "svix"
token: "xxx"
"#;
    let err = Config::from_src(src, None).err().unwrap();
    let msg = err.to_string();
    assert!(msg.contains("failed to parse transformation for sender `bad xform`"));
}
#[test]
fn test_var_substitution_json_values_ok() {
    let src = r#""#;
    // GCP credentials is one place where we _expect_ json to be supplied as an env var.
    // We need to be able to support this.
    let vars = HashMap::from([(
        String::from("GOOGLE_APPLICATION_CREDENTIALS_JSON"),
        String::from(r#"{"foo": true, "bar": 123}"#),
    )]);
    // Should not be an error
    let _cfg = Config::from_src(src, Some(&vars)).unwrap();
}

View File

@@ -0,0 +1,11 @@
<?php
// Directory-access guard: expire any cached copy of this response and
// redirect the visitor up one level so the folder contents are never listed.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
// Second Cache-Control header is appended (replace=false) for legacy proxies.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,345 @@
use std::{
io::{Error, ErrorKind, Result},
path::PathBuf,
time::Duration,
};
use clap::Parser;
use once_cell::sync::Lazy;
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_sdk::{
metrics::{data::Temporality, reader::TemporalitySelector, InstrumentKind, SdkMeterProvider},
runtime::Tokio,
};
use svix_bridge_types::{SenderInput, TransformerJob};
use svix_ksuid::{KsuidLike as _, KsuidMs};
#[cfg(all(not(target_env = "msvc"), feature = "jemalloc"))]
use tikv_jemallocator::Jemalloc;
use tracing::Instrument;
use tracing_subscriber::{layer::SubscriberExt as _, util::SubscriberInitExt as _};
use self::config::Config;
mod allocator;
mod config;
mod metrics;
mod runtime;
mod webhook_receiver;
use crate::{
allocator::{get_allocator_stat_mibs, get_allocator_stats},
metrics::CommonMetrics,
};
#[cfg(all(not(target_env = "msvc"), feature = "jemalloc"))]
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;

// Fail the build early on msvc rather than at link time.
#[cfg(all(target_env = "msvc", feature = "jemalloc"))]
compile_error!("jemalloc cannot be enabled on msvc");

// Seems like it would be useful to be able to configure this.
// In some docker setups, hostname is sometimes the container id, and advertising this can be
// helpful.
// A KSUID generated once per process, advertised as the `instance_id` resource attribute.
static INSTANCE_ID: Lazy<String> = Lazy::new(|| KsuidMs::new(None, None).to_string());
/// Build the OTEL resource attributes (service name + per-process instance id).
fn get_svc_identifiers(cfg: &Config) -> opentelemetry_sdk::Resource {
    // Service name falls back to "svix-bridge" unless overridden in the otel config.
    let service_name = cfg
        .opentelemetry
        .as_ref()
        .and_then(|otel| otel.service_name.as_deref())
        .unwrap_or("svix-bridge")
        .to_owned();
    opentelemetry_sdk::Resource::new(vec![
        opentelemetry::KeyValue::new("service.name", service_name),
        opentelemetry::KeyValue::new("instance_id", INSTANCE_ID.to_owned()),
    ])
}
/// Install the global tracing subscriber: an stdout layer (plain or JSON per
/// `cfg.log_format`), an optional OTLP span-export layer, and an env-filter
/// built from `RUST_LOG` (when set) or `cfg.log_level`.
fn setup_tracing(cfg: &Config) {
    // RUST_LOG wins when set; otherwise synthesize directives from the config.
    let filter_directives = std::env::var("RUST_LOG").unwrap_or_else(|e| {
        if let std::env::VarError::NotUnicode(_) = e {
            eprintln!("RUST_LOG environment variable has non-utf8 contents, ignoring!");
        }
        const CRATE_NAME: &str = env!("CARGO_CRATE_NAME");
        let level = cfg.log_level.to_string();
        let var = [
            format!("{CRATE_NAME}={level}"),
            // XXX: Assuming this applies to the Producer side (aka `og-ingester`) when we fold it back in.
            format!("tower_http={level}"),
        ];
        var.join(",")
    });
    let otel_layer = cfg.opentelemetry.as_ref().map(|otel_cfg| {
        // Configure the OpenTelemetry tracing layer
        opentelemetry::global::set_text_map_propagator(
            opentelemetry_sdk::propagation::TraceContextPropagator::new(),
        );
        let exporter = opentelemetry_otlp::new_exporter()
            .tonic()
            .with_endpoint(&otel_cfg.address);
        let tracer = opentelemetry_otlp::new_pipeline()
            .tracing()
            .with_exporter(exporter)
            .with_trace_config(
                opentelemetry_sdk::trace::config()
                    .with_sampler(
                        // No configured ratio means sample everything.
                        otel_cfg
                            .sample_ratio
                            .map(opentelemetry_sdk::trace::Sampler::TraceIdRatioBased)
                            .unwrap_or(opentelemetry_sdk::trace::Sampler::AlwaysOn),
                    )
                    .with_resource(get_svc_identifiers(cfg)),
            )
            .install_batch(Tokio)
            .unwrap();
        tracing_opentelemetry::layer().with_tracer(tracer)
    });
    // Then create a subscriber with an additional layer printing to stdout.
    // This additional layer is either formatted normally or in JSON format.
    match cfg.log_format {
        config::LogFormat::Default => {
            let stdout_layer = tracing_subscriber::fmt::layer();
            tracing_subscriber::Registry::default()
                .with(otel_layer)
                .with(stdout_layer)
                .with(tracing_subscriber::EnvFilter::new(filter_directives))
                .init()
        }
        config::LogFormat::Json => {
            // Flattened events: fields appear at the top level of each JSON record.
            let fmt = tracing_subscriber::fmt::format().json().flatten_event(true);
            let json_fields = tracing_subscriber::fmt::format::JsonFields::new();
            let stdout_layer = tracing_subscriber::fmt::layer()
                .event_format(fmt)
                .fmt_fields(json_fields);
            tracing_subscriber::Registry::default()
                .with(otel_layer)
                .with(stdout_layer)
                .with(tracing_subscriber::EnvFilter::new(filter_directives))
                .init()
        }
    };
}
/// Delta temporality selector as recommended by upstream:
/// https://github.com/open-telemetry/opentelemetry-rust/discussions/1511#discussioncomment-8386721
struct DeltaTemporalitySelector;

impl TemporalitySelector for DeltaTemporalitySelector {
    fn temporality(&self, kind: InstrumentKind) -> Temporality {
        // Up/down counters stay cumulative; every other instrument reports deltas.
        match kind {
            InstrumentKind::UpDownCounter | InstrumentKind::ObservableUpDownCounter => {
                Temporality::Cumulative
            }
            _ => Temporality::Delta,
        }
    }
}
/// Build an OTLP metrics pipeline when `cfg.opentelemetry` is set; `None` otherwise.
///
/// The caller holds on to the returned provider (see the `_metrics` binding in
/// `main`) — presumably dropping it stops metric export; verify before changing.
pub fn setup_metrics(cfg: &Config) -> Option<SdkMeterProvider> {
    cfg.opentelemetry.as_ref().map(|otel_cfg| {
        let exporter = opentelemetry_otlp::new_exporter()
            .tonic()
            .with_endpoint(&otel_cfg.address);
        opentelemetry_otlp::new_pipeline()
            .metrics(Tokio)
            .with_temporality_selector(DeltaTemporalitySelector)
            .with_exporter(exporter)
            .with_resource(get_svc_identifiers(cfg))
            .build()
            .unwrap()
    })
}
/// Run every sender input "forever", restarting any that return.
///
/// Each input gets its own task. When an input's `run()` future completes
/// (which is always unexpected), the event is logged and the input is
/// restarted after a one-second pause.
async fn supervise_senders(inputs: Vec<Box<dyn SenderInput>>) -> Result<()> {
    let mut set = tokio::task::JoinSet::new();
    for input in inputs {
        set.spawn(async move {
            // FIXME: needs much better signaling for termination
            loop {
                let fut = input.run();
                // If this future returns, the consumer terminated unexpectedly.
                if let Err(e) = fut.await {
                    tracing::warn!(
                        "sender input {} unexpectedly terminated: {}",
                        input.name(),
                        e
                    );
                } else {
                    tracing::warn!("sender input {} unexpectedly terminated", input.name());
                }
                // Brief fixed backoff before restarting the input.
                tokio::time::sleep(Duration::from_secs(1)).await;
            }
        });
    }
    // FIXME: add signal handling to trigger a (intentional) graceful shutdown.
    // FIXME: when a plugin exits unexpectedly, what do?
    // Most consumers are probably stateful/brittle and may disconnect from time to time.
    // Ideally none of these tasks would ever return Ok or Err. They'd run forever.
    // Having the tasks themselves try to recover means if we see a task finish here, something
    // must be really wrong, so maybe we trigger a shutdown of the rest when one stops here.
    while let Some(_res) = set.join_next().await {
        // In order for plugins to coordinate a shutdown, maybe they could:
        // - have a shutdown method and handle their own internal signalling, or maybe
        // - take a oneshot channel as an arg to `run()`
        // Basically we need something that formalizes the shutdown flow in a cross-crate
        // friendly way.
        todo!("graceful shutdown");
    }
    Ok(())
}
// CLI arguments. Plain `//` comments are used here on purpose: `///` doc
// comments on clap types can surface in the generated help text.
#[derive(Parser)]
pub struct Args {
    // Path to the config file on disk; also settable via SVIX_BRIDGE_CFG_FILE.
    #[arg(long, env = "SVIX_BRIDGE_CFG_FILE", help = "Path to the config file.")]
    cfg_file: Option<PathBuf>,
    // Raw config source passed inline; mutually exclusive with `cfg_file`.
    #[arg(
        long,
        env = "SVIX_BRIDGE_CFG",
        help = "Config data as a string (instead of a file on disk).",
        conflicts_with = "cfg_file"
    )]
    cfg: Option<String>,
}
/// Entrypoint: resolve the config source, parse it with env-var substitution,
/// start telemetry, the JS transformation workers, the sender supervisors, and
/// the webhook receiver HTTP server; then run until something fails.
#[tokio::main]
async fn main() -> Result<()> {
    let args = Args::parse();
    // Either an explicit path, or well-known file names in the current directory.
    let mut config_search_paths = vec![];
    if let Some(fp) = args.cfg_file {
        config_search_paths.push(fp)
    } else {
        for name in ["svix-bridge.yaml", "svix-bridge.yml", "svix-bridge.json"] {
            config_search_paths.push(std::env::current_dir().expect("current dir").join(name));
        }
    }
    // Clap will ensure we have only one or the other (cfg and cfg_file can't be specified together).
    let cfg_source = match args.cfg {
        Some(cfg_source) => cfg_source,
        None => {
            let fp = config_search_paths
                .into_iter()
                .find(|x| x.exists())
                .expect("config file path");
            std::fs::read_to_string(&fp).map_err(|e| {
                let p = fp.into_os_string().into_string().expect("config file path");
                Error::new(ErrorKind::Other, format!("Failed to read {p}: {e}"))
            })
        }?,
    };
    // Environment variables feed the ${VAR} substitution in the config source.
    let vars = std::env::vars().collect();
    let cfg = Config::from_src(&cfg_source, Some(vars).as_ref())?;
    setup_tracing(&cfg);
    // Held for the life of the process so metric export stays active.
    let _metrics = setup_metrics(&cfg);
    tracing::info!("starting");
    // Background task: sample allocator stats every 15s into the common gauges.
    tokio::spawn(
        async move {
            let mut interval = tokio::time::interval(Duration::from_secs(15));
            let metrics = CommonMetrics::new(&opentelemetry::global::meter("svix.com"));
            match get_allocator_stat_mibs() {
                Ok(mibs) => {
                    tracing::debug!("Common Metrics Collection: Started");
                    loop {
                        interval.tick().await;
                        if let Ok(Some((allocated, resident))) =
                            get_allocator_stats(true, mibs.clone())
                        {
                            metrics.record_mem_allocated(allocated as _);
                            metrics.record_mem_resident(resident as _);
                        }
                    }
                }
                Err(e) => tracing::error!("Unable to get allocator stats mibs: {e}"),
            }
        }
        .instrument(tracing::error_span!(
            "common_metrics_collector",
            instance_id = tracing::field::Empty
        )),
    );
    // Channel through which senders/receivers submit transformation jobs.
    let (xform_tx, mut xform_rx) = tokio::sync::mpsc::unbounded_channel::<TransformerJob>();
    // XXX: this is a bit nasty, but might be okay to start.
    // The nested spawns are needed to make sure we can saturate the
    // threadpool (otherwise we'd run each job serially).
    //
    // Another approach would be to do what og-ingester did: give each plugin a clone of the
    // `TpHandle`, but this would likely mean moving the runtime module over to the `-types` crate.
    // I'd rather not do this, mostly to help keep things more unit test friendly; channels can
    // help keep the coupling more loose, with less stateful baggage.
    // Starting with this just to keep the JS executor stuff here in the binary.
    tokio::spawn(async move {
        tracing::info!(
            "Starting JS Transformation Workers: {}",
            cfg.transformation_worker_count
        );
        let pooler = runtime::JsPooler::new(cfg.transformation_worker_count);
        while let Some(TransformerJob {
            input,
            script,
            callback_tx,
        }) = xform_rx.recv().await
        {
            let tp = pooler.clone();
            tokio::spawn(async move {
                let out = tp.run_script(input, script).await;
                // FIXME: seeing this Err case come up during load testing.
                // Seems like we shouldn't be hitting this so easily while the process is not terminating.
                // Regularly there are group error log lines that show up right at the end of an
                // `oha` run, POSTing to receivers. Need to investigate why.
                if callback_tx
                    .send(out.map_err(|e| tracing::error!("{:?}", e)))
                    .is_err()
                {
                    // If the callback fails, the plugin is likely unwinding/dropping.
                    // Not a whole lot we can do about that.
                    tracing::error!("failed to send js output back to caller");
                }
            });
        }
    });
    // Build each configured sender and hand it the transformation channel.
    let mut senders = Vec::with_capacity(cfg.senders.len());
    for sc in cfg.senders {
        let mut sender: Box<dyn SenderInput> =
            sc.try_into().map_err(|e| Error::new(ErrorKind::Other, e))?;
        sender.set_transformer(Some(xform_tx.clone()));
        senders.push(sender);
    }
    if senders.is_empty() {
        tracing::warn!("No senders configured.")
    }
    let senders_fut = supervise_senders(senders);
    if cfg.receivers.is_empty() {
        tracing::warn!("No receivers configured.")
    }
    let receivers_fut = webhook_receiver::run(cfg.http_listen_address, cfg.receivers, xform_tx);
    // Neither future is expected to complete; reaching this point is an error.
    match tokio::try_join!(senders_fut, receivers_fut) {
        Ok(_) => tracing::error!("unexpectedly exiting"),
        Err(e) => tracing::error!("unexpectedly exiting: {}", e),
    }
    tracing::info!("exiting...");
    Ok(())
}

View File

@@ -0,0 +1,43 @@
use opentelemetry::metrics::{Meter, ObservableGauge};
fn init_metric<T, E: std::fmt::Display>(result: Result<T, E>) -> Option<T> {
match result {
Ok(t) => Some(t),
Err(e) => {
tracing::error!("Failed to initialize metric: {}", e);
None
}
}
}
/// Gauges for allocator statistics, observed periodically by the metrics
/// collection task spawned in `main`.
#[derive(Clone)]
pub struct CommonMetrics {
    // `None` when gauge registration failed at startup (see `init_metric`).
    mem_allocated_recorder: Option<ObservableGauge<u64>>,
    mem_resident_recorder: Option<ObservableGauge<u64>>,
}
impl CommonMetrics {
pub fn new(meter: &Meter) -> Self {
let mem_resident_recorder =
init_metric(meter.u64_observable_gauge("svix.mem_resident").try_init());
let mem_allocated_recorder =
init_metric(meter.u64_observable_gauge("svix.mem_allocated").try_init());
Self {
mem_allocated_recorder,
mem_resident_recorder,
}
}
pub fn record_mem_allocated(&self, value: u64) {
if let Some(ref recorder) = self.mem_allocated_recorder {
recorder.observe(value, &[]);
}
}
pub fn record_mem_resident(&self, value: u64) {
if let Some(ref recorder) = self.mem_resident_recorder {
recorder.observe(value, &[]);
}
}
}

View File

@@ -0,0 +1,11 @@
<?php
// Directory-access guard: expire any cached copy of this response and
// redirect the visitor up one level so the folder contents are never listed.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
// Second Cache-Control header is appended (replace=false) for legacy proxies.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,144 @@
use std::num::NonZeroUsize;
use anyhow::Result;
use deadpool::unmanaged::Pool;
use deno_ast::{MediaType, ParseParams, SourceTextInfo};
use deno_runtime::deno_core::{
serde_v8,
v8::{self},
JsRuntime,
};
use svix_bridge_types::{JsObject, TransformerInput, TransformerOutput};
use tokio::sync::oneshot;
/// Handle to a JS runtime living on its own OS thread.
///
/// Jobs are shipped to the thread over `tx`; results come back on the oneshot
/// channel carried inside each `Job`. The join handle is kept only to tie the
/// thread's lifetime to this struct.
struct Executor {
    tx: std::sync::mpsc::Sender<Job>,
    _handle: std::thread::JoinHandle<()>,
}
impl Default for Executor {
    /// Spawn a dedicated worker thread owning a fresh `JsRuntime`.
    ///
    /// Jobs are consumed from the channel one at a time, so scripts submitted
    /// to a single executor never run concurrently. The thread exits when the
    /// sending side of the channel is dropped.
    fn default() -> Self {
        let (tx, rx) = std::sync::mpsc::channel::<Job>();
        let _handle = std::thread::spawn(move || {
            let mut runtime = JsRuntime::new(Default::default());
            for Job { input, script, cb } in rx {
                let ret = run_script_inner(&mut runtime, input, script);
                // A send failure means the caller stopped waiting (receiver dropped).
                if cb.send(ret).is_err() {
                    tracing::error!("failed to send script output to caller");
                }
            }
        });
        Self { tx, _handle }
    }
}
// Channel used to hand a script's result back to the caller of `Executor::execute`.
type Callback = oneshot::Sender<Result<TransformerOutput>>;

/// A unit of work for an `Executor` thread: the input value, the script
/// source, and the channel to reply on.
struct Job {
    input: TransformerInput,
    script: String,
    cb: Callback,
}
impl Executor {
    /// Queue `script` on this executor's worker thread and await its result.
    async fn execute(
        &mut self,
        input: TransformerInput,
        script: String,
    ) -> Result<TransformerOutput> {
        let (reply_tx, reply_rx) = oneshot::channel();
        let job = Job {
            input,
            script,
            cb: reply_tx,
        };
        self.tx.send(job)?;
        reply_rx.await?
    }
}
/// A fixed-size pool of `Executor`s; cloning shares the same underlying pool.
#[derive(Clone)]
pub struct JsPooler {
    executors: Pool<Executor>,
}
impl JsPooler {
    /// Build a pool backed by `pool_size` executors, each with its own
    /// dedicated JS runtime thread.
    pub fn new(pool_size: NonZeroUsize) -> Self {
        let pool_size = pool_size.get();
        let mut items = Vec::with_capacity(pool_size);
        for _ in 0..pool_size {
            items.push(Executor::default());
        }
        Self {
            executors: Pool::from(items),
        }
    }

    /// Run `script` against `input` on the next available executor.
    pub async fn run_script(
        &self,
        input: TransformerInput,
        script: String,
    ) -> Result<TransformerOutput> {
        // `Pool::get` only needs a shared reference and the returned guard
        // carries its own pool handle, so the previous `self.executors.clone()`
        // into a local was unnecessary.
        let mut executor = self.executors.get().await;
        executor
            .as_mut()
            .map_err(|e| anyhow::anyhow!("{e:?}"))?
            .execute(input, script)
            .await
    }
}
/// Checks that the input parses as valid JavaScript, giving the parser's error back on failure.
pub fn validate_script(src: &str) -> Result<()> {
    let params = ParseParams {
        specifier: "file:///x.js".to_string(),
        text_info: SourceTextInfo::new(src.into()),
        media_type: MediaType::JavaScript,
        capture_tokens: false,
        scope_analysis: false,
        maybe_syntax: None,
    };
    // The parsed source itself is unused; only the success/failure matters.
    deno_ast::parse_script(params)?;
    Ok(())
}
/// Execute `script` (which must define a `handler` function) against `input`
/// inside the given runtime, decoding the returned JS value into a
/// [`TransformerOutput`]. Non-object return values map to
/// `TransformerOutput::Invalid`; evaluation failures become errors.
fn run_script_inner(
    runtime: &mut JsRuntime,
    input: TransformerInput,
    script: String,
) -> Result<TransformerOutput> {
    // The input is injected into the generated source as a JSON literal.
    let input = serde_json::to_string(&input)?;
    let res = runtime.execute_script(
        "<anon>",
        format!(
            // Wrap the user script, and invocation of `handler`, in a self-calling closure.
            // The hope is we'll prevent the globals space from being polluted call after call.
            r#"
(function () {{
{script}
return handler({});
}})()
"#,
            input
        )
        .into(),
    );
    match res {
        Ok(global) => {
            // Enter a handle scope to materialize the result as a local value.
            let scope = &mut runtime.handle_scope();
            let local = v8::Local::new(scope, global);
            match serde_v8::from_v8::<JsObject>(scope, local) {
                Ok(v) => Ok(TransformerOutput::Object(v)),
                // Non-object results are logged and reported as Invalid rather
                // than failing the whole job.
                Err(serde_v8::Error::ExpectedObject(msg)) => {
                    tracing::error!("{msg}");
                    Ok(TransformerOutput::Invalid)
                }
                Err(e) => Err(e)?,
            }
        }
        Err(err) => Err(anyhow::format_err!("Evaling error: {:?}", err)),
    }
}
#[cfg(test)]
mod tests;

View File

@@ -0,0 +1,137 @@
use deno_runtime::deno_core::JsRuntime;
use serde_json::json;
use svix_bridge_types::{TransformerInput, TransformerOutput};
use super::{run_script_inner, validate_script};
/// Build a fresh, default-configured deno runtime for a single test.
fn get_test_rt() -> JsRuntime {
    JsRuntime::new(Default::default())
}
// Really just trying to figure out if the deno runtime is working the way I hope.
#[test]
fn test_happy_fn() {
    let src = String::from(
        r#"
function handler(input) {
return { "x": 123, ...input };
}
"#,
    );
    let mut rt = get_test_rt();
    let out = run_script_inner(&mut rt, json!({ "y": 456 }).into(), src).unwrap();
    match out {
        TransformerOutput::Invalid => panic!("got unexpected return value"),
        TransformerOutput::Object(v) => {
            assert_eq!(v["x"].as_i64(), Some(123));
            assert_eq!(v["y"].as_i64(), Some(456));
        }
    }
}
#[test]
fn test_invalid_output_bool() {
    // A non-object return value must be classified as Invalid.
    let src = String::from(
        r#"
function handler(input) {
return false;
}
"#,
    );
    let mut rt = get_test_rt();
    match run_script_inner(&mut rt, json!({}).into(), src).unwrap() {
        TransformerOutput::Invalid => (),
        TransformerOutput::Object(_) => panic!("got unexpected return value"),
    }
}
#[test]
// FIXME: serde decodes arrays with keys like "0", "1"... in this situation, failing the test.
#[ignore]
fn test_invalid_output_array() {
    let src = String::from(
        r#"
function handler(input) {
return [1, 2];
}
"#,
    );
    let mut rt = get_test_rt();
    match run_script_inner(&mut rt, json!({}).into(), src).unwrap() {
        TransformerOutput::Invalid => (),
        TransformerOutput::Object(_) => panic!("got unexpected return value"),
    }
}
/// Receives a string input, parses as JSON in js, then returns the result back to rust.
#[test]
fn test_string_input() {
    let src = r#"
function handler(input) {
return JSON.parse(input);
}
"#
    .to_string();
    let mut rt = get_test_rt();
    let res = run_script_inner(
        &mut rt,
        TransformerInput::String(String::from(r#"{"x": 123}"#)),
        src,
    )
    .unwrap();
    match res {
        TransformerOutput::Object(v) => {
            assert_eq!(v["x"].as_i64(), Some(123));
        }
        // BUG FIX: this arm was previously `()`, so the test silently passed
        // even when the output was Invalid. It must fail the test instead.
        TransformerOutput::Invalid => panic!("got unexpected return value"),
    }
}
/// Take the string input and just add it to a field in the returned object.
/// The string should make it through, back to rust, as-is.
#[test]
fn test_string_input2() {
    let src = r#"
function handler(input) {
return { "payload": input };
}
"#
    .to_string();
    let mut rt = get_test_rt();
    let res = run_script_inner(
        &mut rt,
        TransformerInput::String(String::from("Hello World")),
        src,
    )
    .unwrap();
    match res {
        TransformerOutput::Object(v) => {
            assert_eq!(v["payload"].as_str(), Some("Hello World"));
        }
        // BUG FIX: this arm was previously `()`, so the test silently passed
        // even when the output was Invalid. It must fail the test instead.
        TransformerOutput::Invalid => panic!("got unexpected return value"),
    }
}
#[test]
fn test_validate_script_bad_syntax_is_err() {
    // `123` is not a valid binding name and the string literal is unterminated.
    let res = validate_script("let 123 = ';");
    assert!(res.is_err());
}
#[test]
fn test_validate_script_empty_handler_is_ok() {
    let res = validate_script("function handler() { }");
    assert!(res.is_ok());
}
#[test]
fn test_validate_script_arrow_fn_is_ok() {
    let res = validate_script("const handler = () => ({ a: 123 })");
    assert!(res.is_ok());
}
/// Technically, this should be legal though the utility is questionable.
#[test]
fn test_validate_script_empty_is_ok() {
    for src in ["", " "] {
        assert!(validate_script(src).is_ok());
    }
}

View File

@@ -0,0 +1,18 @@
use serde::Deserialize;
use svix_bridge_types::{TransformationConfig, WebhookVerifier};
use crate::config::ReceiverConfig;
/// The [`IntegrationConfig`] is the struct associated with a given [`IntegrationId`]. When the route
/// associated with an [`IntegrationId`] receives a webhook, or any other HTTP request, then it will
/// attempt to be validated with the specified [`VerificationScheme`]. Should the configured scheme
/// indicate that the webhook is valid, then the webhook will be forwarded verbatim to the configured
/// [`ForwardDestination`].
#[derive(Deserialize)]
#[allow(dead_code)]
pub struct IntegrationConfig {
    // Input-side configuration for the receiver (path id, input options, etc.).
    pub receiver_cfg: ReceiverConfig,
    // How incoming requests for this integration are verified.
    pub verification: WebhookVerifier,
    // Optional js transformation applied to the payload before forwarding.
    #[serde(default)]
    pub transformation: Option<TransformationConfig>,
}

View File

@@ -0,0 +1,11 @@
<?php
// Directory-browsing guard: a stub index.php like this is commonly dropped into
// every directory so that requesting the path directly never lists its contents.

// Mark the response as already expired and forbid any caching of it.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
// Second Cache-Control header is appended, not replaced (second arg `false`).
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');

// Redirect the visitor up one level and stop executing immediately.
header('Location: ../');
exit;

View File

@@ -0,0 +1,185 @@
use std::net::SocketAddr;
use axum::{
body::Body,
extract::{Path, State},
routing::post,
Router,
};
use svix_bridge_types::{
ForwardRequest, TransformationConfig, TransformerInput, TransformerInputFormat, TransformerJob,
TransformerOutput, TransformerTx,
};
use tracing::instrument;
use types::{IntegrationId, IntegrationState, InternalState, SerializableRequest, Unvalidated};
use crate::{config::ReceiverConfig, webhook_receiver::types::SerializablePayload};
mod config;
mod types;
mod verification;
/// Builds the axum router: one wildcard route per integration id — with and
/// without a trailing slash — accepting POST/PUT/GET/PATCH.
fn router() -> Router<InternalState, Body> {
    // Both paths share the same method router; build it via a closure since
    // `MethodRouter` values can't be reused directly.
    let methods = || post(route).put(route).get(route).patch(route);
    Router::new()
        .route("/webhook/:integration_id", methods())
        .route("/webhook/:integration_id/", methods())
}
/// Builds the route table from `routes`, then serves the receiver on
/// `listen_addr` until the server exits. All failures are surfaced as
/// `std::io::Error`.
pub async fn run(
    listen_addr: SocketAddr,
    routes: Vec<ReceiverConfig>,
    transformer_tx: TransformerTx,
) -> std::io::Result<()> {
    // Local adapter: this function reports every failure as an io::Error.
    fn as_io_error(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> std::io::Error {
        std::io::Error::new(std::io::ErrorKind::Other, e)
    }

    let state = InternalState::from_receiver_configs(routes, transformer_tx)
        .await
        .map_err(as_io_error)?;
    let router = router().with_state(state);

    tracing::info!("Listening on: {listen_addr}");
    axum::Server::bind(&listen_addr)
        .serve(router.into_make_service())
        .await
        .map_err(as_io_error)
}
#[instrument(
    skip_all,
    level = "error",
    fields(
        integration_id = integration_id.as_ref(),
    )
)]
// Single handler for every webhook route: looks up the integration, verifies
// the request, optionally transforms the payload, then forwards it.
async fn route(
    Path(integration_id): Path<IntegrationId>,
    State(InternalState {
        routes,
        transformer_tx,
    }): State<InternalState>,
    req: SerializableRequest<Unvalidated>,
) -> http::StatusCode {
    // Unknown integration ids produce a 404.
    if let Some(IntegrationState {
        verifier,
        output,
        transformation,
    }) = routes.get(&integration_id)
    {
        // Verify the request (e.g. signature check) before touching the body.
        match req.validate(verifier).await {
            Ok(req) => {
                // Build the ForwardRequest, running the configured
                // transformation if any; failures map to a status code.
                let payload = match parse_payload(
                    req.payload(),
                    transformation,
                    transformer_tx.clone(),
                )
                .await
                {
                    Err(e) => return e,
                    Ok(p) => p,
                };
                tracing::debug!("forwarding request");
                match output.handle(payload).await {
                    // Success deliberately returns an empty 204 response.
                    Ok(_) => http::StatusCode::NO_CONTENT,
                    Err(e) => {
                        tracing::error!("Error forwarding request: {}", e);
                        http::StatusCode::INTERNAL_SERVER_ERROR
                    }
                }
            }
            // Verification failures carry their own status code.
            Err(code) => {
                tracing::warn!("validation failed: {code}");
                code
            }
        }
    } else {
        tracing::trace!("integration not found");
        http::StatusCode::NOT_FOUND
    }
}
/// Figures out how to build a JSON object from the payload, optionally running it through a
/// transformation.
///
/// WRT "raw" payloads, the return value here is going to be a JSON object regardless of whether
/// or not the queue producer wants "raw" data.
///
/// When there's no transformation defined we therefore attempt to parse the body as json.
/// When a transformation is defined, we branch to see if it expects string or json input.
///
/// For either case, we expect the value produced to match the schema of a [`ForwardRequest`].
async fn parse_payload(
payload: &SerializablePayload,
transformation: &Option<TransformationConfig>,
transformer_tx: TransformerTx,
) -> Result<ForwardRequest, http::StatusCode> {
match transformation {
Some(xform) => {
let input = match xform.format() {
TransformerInputFormat::String => {
TransformerInput::String(payload.as_string().map_err(|_| {
tracing::error!("Unable to parse request body as string");
http::StatusCode::BAD_REQUEST
})?)
}
TransformerInputFormat::Json => {
TransformerInput::JSON(payload.as_json().map_err(|_| {
tracing::error!("Unable to parse request body as json");
http::StatusCode::BAD_REQUEST
})?)
}
};
transform(input, xform.source().clone(), transformer_tx).await
}
// Keep the original payload as-is if there's no transformation specified, but stuff the
// whole thing into the payload field.
// The as_json() only gets us to `Value`, so we also need a `from_value` call to marshal
// into a [`ForwardRequest`] type.
None => Ok(ForwardRequest {
payload: payload.as_json().map_err(|_| {
tracing::error!("Unable to parse request body as json");
http::StatusCode::BAD_REQUEST
})?,
}),
}
}
/// Attempts to run the payload through a js transformation.
///
/// Sends a [`TransformerJob`] to the transformer worker and awaits the result;
/// every failure mode maps to a 500 status.
async fn transform(
    input: TransformerInput,
    script: String,
    tx: TransformerTx,
) -> Result<ForwardRequest, http::StatusCode> {
    let (job, callback) = TransformerJob::new(script, input);
    // A closed channel means the transformer worker is gone entirely.
    tx.send(job).map_err(|e| {
        tracing::error!("transformations are not available: {}", e);
        http::StatusCode::INTERNAL_SERVER_ERROR
    })?;

    match callback.await {
        // The only "good" outcome: an object we can marshal into a ForwardRequest.
        Ok(Ok(TransformerOutput::Object(obj))) => {
            serde_json::from_value(serde_json::Value::Object(obj)).map_err(|e| {
                tracing::error!("transformation produced invalid payload: {}", e);
                http::StatusCode::INTERNAL_SERVER_ERROR
            })
        }
        Ok(Ok(TransformerOutput::Invalid)) => {
            tracing::error!("transformation produced invalid payload");
            Err(http::StatusCode::INTERNAL_SERVER_ERROR)
        }
        // Script error or dropped callback channel.
        _ => {
            tracing::error!("transformation failed");
            Err(http::StatusCode::INTERNAL_SERVER_ERROR)
        }
    }
}
#[cfg(test)]
mod tests;

View File

@@ -0,0 +1,394 @@
use std::sync::Arc;
use axum::{
body::Body,
http::{Request, StatusCode},
};
use serde_json::json;
use svix_bridge_types::{
async_trait, svix::webhooks::Webhook, ForwardRequest, ReceiverOutput, TransformationConfig,
TransformerInput, TransformerInputFormat, TransformerJob, TransformerOutput,
};
use tower::{Service, ServiceExt};
use super::router;
use crate::webhook_receiver::{
types::{IntegrationState, InternalState},
verification::{NoVerifier, SvixVerifier},
};
/// Test double for [`ReceiverOutput`]: instead of forwarding anywhere, it
/// pushes each received payload onto an unbounded channel so tests can
/// inspect exactly what was forwarded.
struct FakeReceiverOutput {
    tx: tokio::sync::mpsc::UnboundedSender<serde_json::Value>,
}

impl FakeReceiverOutput {
    /// Returns the output plus the receiving end of its capture channel.
    pub fn new() -> (
        Self,
        tokio::sync::mpsc::UnboundedReceiver<serde_json::Value>,
    ) {
        let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
        (Self { tx }, rx)
    }
}

#[async_trait]
impl ReceiverOutput for FakeReceiverOutput {
    fn name(&self) -> &str {
        "fake output"
    }

    async fn handle(&self, request: ForwardRequest) -> std::io::Result<()> {
        // Only the payload is captured; nothing else is relevant to these tests.
        self.tx.send(request.payload).unwrap();
        Ok(())
    }
}
// A single receiver with no verification: the body must be forwarded verbatim.
#[tokio::test]
async fn test_forwarding_no_verification() {
    // Transformer channel is unused here; no transformation is configured.
    let (tx, _rx) = tokio::sync::mpsc::unbounded_channel();
    let (a_output, mut a_rx) = FakeReceiverOutput::new();
    let state_map = [(
        "a".into(),
        IntegrationState {
            verifier: NoVerifier.into(),
            output: Arc::new(Box::new(a_output)),
            transformation: None,
        },
    )]
    .into_iter()
    .collect();
    let state = InternalState::new(state_map, tx);
    let app = router().with_state(state);

    let response = app
        .oneshot(
            Request::builder()
                .uri("/webhook/a")
                .method("POST")
                .header("content-type", "application/json")
                .body(serde_json::to_vec(&json!({"a": true})).unwrap().into())
                .unwrap(),
        )
        .await
        .unwrap();

    assert_eq!(response.status(), StatusCode::NO_CONTENT);
    // The exact payload should have reached the output.
    let forwarded = a_rx.try_recv().unwrap();
    assert_eq!(json!(forwarded), json!({"a": true}));
}
/// Registers 2 receivers and sends 1 request to each.
#[tokio::test]
async fn test_forwarding_multiple_receivers() {
    let (tx, _rx) = tokio::sync::mpsc::unbounded_channel();
    let (a_output, mut a_rx) = FakeReceiverOutput::new();
    let (b_output, mut b_rx) = FakeReceiverOutput::new();
    let state_map = [
        (
            "a".into(),
            IntegrationState {
                verifier: NoVerifier.into(),
                output: Arc::new(Box::new(a_output)),
                transformation: None,
            },
        ),
        (
            "b".into(),
            IntegrationState {
                verifier: NoVerifier.into(),
                output: Arc::new(Box::new(b_output)),
                transformation: None,
            },
        ),
    ]
    .into_iter()
    .collect();
    let state = InternalState::new(state_map, tx);
    let mut app = router().with_state(state);

    // Hit receiver "a"; only a's channel should see the payload.
    let request = Request::builder()
        .uri("/webhook/a")
        .method("POST")
        .header("content-type", "application/json")
        .body(serde_json::to_vec(&json!({"a": true})).unwrap().into())
        .unwrap();
    let response = ServiceExt::<Request<Body>>::ready(&mut app)
        .await
        .unwrap()
        .call(request)
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NO_CONTENT);
    let forwarded = a_rx.try_recv().unwrap();
    assert_eq!(json!(forwarded), json!({"a": true}));

    // Hit receiver "b"; only b's channel should see the payload.
    let request = Request::builder()
        .uri("/webhook/b")
        .method("POST")
        .header("content-type", "application/json")
        .body(serde_json::to_vec(&json!({"b": true})).unwrap().into())
        .unwrap();
    let response = ServiceExt::<Request<Body>>::ready(&mut app)
        .await
        .unwrap()
        .call(request)
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NO_CONTENT);
    let forwarded = b_rx.try_recv().unwrap();
    assert_eq!(json!(forwarded), json!({"b": true}));

    // Both channels should be empty at this point.
    assert!(a_rx.try_recv().is_err());
    assert!(b_rx.try_recv().is_err());
}
/// Registers 2 receivers, one with a transformation and one without. Sends 1 request to each.
#[tokio::test]
async fn test_transformation_json() {
    // Stand-in for the real transformer worker: echoes the JSON input back with
    // a `__TRANSFORMED__` marker added, wrapped in a `payload` field.
    let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::<TransformerJob>();
    let _handle = tokio::spawn(async move {
        while let Some(x) = rx.recv().await {
            let mut input = match x.input {
                TransformerInput::JSON(input) => input.as_object().unwrap().clone(),
                _ => unreachable!(),
            };
            input.insert("__TRANSFORMED__".into(), json!(true));
            let out = json!({ "payload": input });
            x.callback_tx
                .send(Ok(TransformerOutput::Object(
                    out.as_object().unwrap().clone(),
                )))
                .ok();
        }
    });

    let (a_output, mut a_rx) = FakeReceiverOutput::new();
    let (b_output, mut b_rx) = FakeReceiverOutput::new();
    let state_map = [
        (
            "transformed".into(),
            IntegrationState {
                verifier: NoVerifier.into(),
                output: Arc::new(Box::new(a_output)),
                // NOTE: the script source is not executed by this test; the
                // spawned task above plays the role of the transformer.
                transformation: Some(
                    "handler = (x) => ({ payload: {__TRANSFORMED__: true, ...x }})".into(),
                ),
            },
        ),
        (
            "as-is".into(),
            IntegrationState {
                verifier: NoVerifier.into(),
                output: Arc::new(Box::new(b_output)),
                transformation: None,
            },
        ),
    ]
    .into_iter()
    .collect();
    let state = InternalState::new(state_map, tx);
    let mut app = router().with_state(state);

    let request = Request::builder()
        .uri("/webhook/transformed")
        .method("POST")
        .header("content-type", "application/json")
        .body(serde_json::to_vec(&json!({"a": true})).unwrap().into())
        .unwrap();
    let response = ServiceExt::<Request<Body>>::ready(&mut app)
        .await
        .unwrap()
        .call(request)
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NO_CONTENT);
    let forwarded = a_rx.try_recv().unwrap();
    // The `__TRANSFORMED__` key should have been added
    assert_eq!(
        json!(forwarded),
        json!({"a": true, "__TRANSFORMED__": true})
    );

    let request = Request::builder()
        .uri("/webhook/as-is")
        .method("POST")
        .header("content-type", "application/json")
        .body(serde_json::to_vec(&json!({"b": true})).unwrap().into())
        .unwrap();
    let response = ServiceExt::<Request<Body>>::ready(&mut app)
        .await
        .unwrap()
        .call(request)
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NO_CONTENT);
    let forwarded = b_rx.try_recv().unwrap();
    // The same payload should come through, without any transformation.
    assert_eq!(json!(forwarded), json!({"b": true}));

    // Both channels should be empty at this point.
    assert!(a_rx.try_recv().is_err());
    assert!(b_rx.try_recv().is_err());
}
// A receiver whose transformation takes the raw request body as a string.
#[tokio::test]
async fn test_transformation_string() {
    // Fake transformer: wraps the raw string input as `{"payload": {"got": ...}}`.
    let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::<TransformerJob>();
    let _handle = tokio::spawn(async move {
        while let Some(x) = rx.recv().await {
            let out = match x.input {
                TransformerInput::String(input) => json!({"payload": { "got": input }})
                    .as_object()
                    .cloned()
                    .unwrap(),
                _ => unreachable!(),
            };
            x.callback_tx.send(Ok(TransformerOutput::Object(out))).ok();
        }
    });

    let (a_output, mut a_rx) = FakeReceiverOutput::new();
    let state_map = [(
        "transformed".into(),
        IntegrationState {
            verifier: NoVerifier.into(),
            output: Arc::new(Box::new(a_output)),
            // `String` format: the body is handed to the transformer without
            // being parsed as json first.
            transformation: Some(TransformationConfig::Explicit {
                format: TransformerInputFormat::String,
                src: String::from("handler = (x) => ({ payload: { got: x }})"),
            }),
        },
    )]
    .into_iter()
    .collect();
    let state = InternalState::new(state_map, tx);
    let mut app = router().with_state(state);

    let request = Request::builder()
        .uri("/webhook/transformed")
        .method("POST")
        .header("content-type", "text/plain")
        .body("plain text".as_bytes().into())
        .unwrap();
    let response = ServiceExt::<Request<Body>>::ready(&mut app)
        .await
        .unwrap()
        .call(request)
        .await
        .unwrap();
    assert_eq!(response.status(), StatusCode::NO_CONTENT);
    let forwarded = a_rx.try_recv().unwrap();
    // The plain text message should have been added in the key "got"
    assert_eq!(json!(forwarded), json!({"got": "plain text"}));
    assert!(a_rx.try_recv().is_err());
}
// Two different bodies - one used during signing, then the other is what we send in the request.
// This should result in a bad response status.
#[tokio::test]
async fn test_forwarding_svix_verification_mismatch() {
    let signed_payload_bytes = serde_json::to_vec(&json!({"a": true})).unwrap();
    let sent_payload_bytes = serde_json::to_vec(&json!({"a": false})).unwrap();

    let (tx, _rx) = tokio::sync::mpsc::unbounded_channel();
    let (a_output, mut a_rx) = FakeReceiverOutput::new();
    // Test-only secret; not a live credential.
    let webhook = Arc::new(Webhook::new("whsec_C2FVsBQIhrscChlQIMV+b5sSYspob7oD").unwrap());
    let timestamp = chrono::Utc::now().timestamp();
    // Sign one body, then send the other, so the signature cannot match.
    let signature = webhook
        .sign("msg_valid", timestamp, &signed_payload_bytes)
        .unwrap();

    let state_map = [(
        "a".into(),
        IntegrationState {
            verifier: SvixVerifier::new(webhook).into(),
            output: Arc::new(Box::new(a_output)),
            transformation: None,
        },
    )]
    .into_iter()
    .collect();
    let state = InternalState::new(state_map, tx);
    let app = router().with_state(state);

    let response = app
        .oneshot(
            Request::builder()
                .uri("/webhook/a")
                .method("POST")
                .header("content-type", "application/json")
                .header("svix-id", "msg_valid")
                .header("svix-signature", signature.clone())
                .header("svix-timestamp", &format!("{timestamp}"))
                .body(sent_payload_bytes.into())
                .unwrap(),
        )
        .await
        .unwrap();

    // Expect a rejection due to signature verification failure.
    assert_eq!(response.status(), StatusCode::BAD_REQUEST);
    // There should be nothing in the channel since the request should _not have been forwarded_.
    assert!(a_rx.try_recv().is_err());
}
// Happy path: a correctly signed request passes verification and is forwarded.
#[tokio::test]
async fn test_forwarding_svix_verification_match() {
    let (tx, _rx) = tokio::sync::mpsc::unbounded_channel();
    let (a_output, mut a_rx) = FakeReceiverOutput::new();
    // Test-only secret; not a live credential.
    let webhook = Arc::new(Webhook::new("whsec_C2FVsBQIhrscChlQIMV+b5sSYspob7oD").unwrap());

    let payload = json!({"a": true});
    let payload_bytes = serde_json::to_vec(&payload).unwrap();
    let timestamp = chrono::Utc::now().timestamp();
    // Sign the exact bytes we are about to send, so verification succeeds.
    let signature = webhook
        .sign("msg_valid", timestamp, &payload_bytes)
        .unwrap();

    let state_map = [(
        "a".into(),
        IntegrationState {
            verifier: SvixVerifier::new(webhook).into(),
            output: Arc::new(Box::new(a_output)),
            transformation: None,
        },
    )]
    .into_iter()
    .collect();
    let state = InternalState::new(state_map, tx);
    let app = router().with_state(state);

    let response = app
        .oneshot(
            Request::builder()
                .uri("/webhook/a")
                .method("POST")
                .header("content-type", "application/json")
                .header("svix-id", "msg_valid")
                .header("svix-signature", signature.clone())
                .header("svix-timestamp", &format!("{timestamp}"))
                .body(payload_bytes.into())
                .unwrap(),
        )
        .await
        .unwrap();

    assert_eq!(response.status(), StatusCode::NO_CONTENT);
    let forwarded = a_rx.try_recv().unwrap();
    assert_eq!(json!(forwarded), json!({"a": true}));
}

View File

@@ -0,0 +1,396 @@
use std::{collections::HashMap, marker::PhantomData, sync::Arc};
use anyhow::Result;
use axum::{
async_trait,
body::{Bytes, HttpBody},
extract::FromRequest,
BoxError,
};
use http::{HeaderMap, HeaderValue, Request};
use serde::{Deserialize, Serialize};
use svix_bridge_types::{
svix, ReceiverInputOpts, ReceiverOutput, TransformationConfig, TransformerTx, WebhookVerifier,
};
use super::verification::{NoVerifier, SvixVerifier, VerificationMethod, Verifier};
use crate::config::ReceiverConfig;
#[derive(Clone)]
/// The [`InternalState`] is passed to the Axum route and is used to map the "IntegrationId" in the
/// URL to the configured [`Verifier`] and [`Forwarder`] variants.
pub struct InternalState {
    // Integration id -> verifier/output/transformation for that route.
    pub routes: Arc<HashMap<IntegrationId, IntegrationState>>,
    // Handle for submitting jobs to the js transformer worker.
    pub transformer_tx: TransformerTx,
}
impl std::fmt::Debug for InternalState {
    // Hand-written Debug that intentionally prints only the type name — no
    // fields are exposed.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("InternalState")
    }
}
impl InternalState {
/// For most production use cases, favor [`InternalState::from_receiver_configs`].
/// Mostly this is an escape hatch to help with testing.
///
/// Constructs an [`InternalState`] from a raw mapping of [`IntegrationId`] to
/// [`IntegrationState`], allowing us to bypass all the config parsing machinery.
///
/// By skipping the config parsing, we can provide custom (i.e. not exposed through the public
/// config) [`ReceiverOutput`] implementations.
pub fn new(
state_map: HashMap<IntegrationId, IntegrationState>,
transformer_tx: TransformerTx,
) -> Self {
InternalState {
routes: Arc::new(state_map),
transformer_tx,
}
}
pub async fn from_receiver_configs(
routes: Vec<ReceiverConfig>,
transformer_tx: TransformerTx,
) -> Result<Self> {
let mut state_map = HashMap::new();
for cfg in routes {
let verifier = match &cfg.input {
ReceiverInputOpts::Webhook {
verification: WebhookVerifier::Svix { endpoint_secret },
..
}
| ReceiverInputOpts::SvixWebhook {
endpoint_secret, ..
} => SvixVerifier::new(Arc::new(
svix::webhooks::Webhook::new(endpoint_secret).expect("Invalid Svix secret"),
))
.into(),
ReceiverInputOpts::Webhook {
verification: WebhookVerifier::None,
..
} => NoVerifier.into(),
};
state_map.insert(
IntegrationId(cfg.input.path_id().to_string()),
IntegrationState {
verifier,
transformation: cfg.transformation.clone(),
output: Arc::new(cfg.into_receiver_output().await?),
},
);
}
Ok(InternalState::new(state_map, transformer_tx))
}
}
/// Each [`IntegrationId`] is a valid route for webhooks to be dispatched to managed by this server,
/// and each [`IntegrationId`] has an associated configuration which defines how the webhook is
/// verified (the [`VerificationScheme`]) and where the webhook is routed to once it is verified
/// (the [`ForwardDestination`]).
///
/// Internally it is also associated with an [`IntegrationState`] which will contain the necessary
/// members to actually perform these actions eg. a handle to a [`FutureProducer`] instead of simply
/// the address(es) of the Kafka bootstrap server(s).
///
/// This type is simply a wrapper for a [`String`] which *should* be safe to use in a URL. If it is
/// not a valid path component for a URL, then the [`IntegrationId`] will never receive any
/// webhooks. However, for simplicity, the inner [`String`] is not validated for URL-safety at this
/// time.
#[repr(transparent)]
#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub struct IntegrationId(String);

impl From<String> for IntegrationId {
    fn from(value: String) -> Self {
        Self(value)
    }
}

impl From<&str> for IntegrationId {
    fn from(value: &str) -> Self {
        Self(value.to_owned())
    }
}

impl AsRef<str> for IntegrationId {
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}
/// The [`IntegrationState`] is a struct which is only able to be created via conversion from a
/// [`IntegrationConfig`]. This struct is what is associated with an [`IntegrationId`] internally
/// after the configuration has been read.
///
/// What distinguishes it from the [`IntegrationConfig`] is that it contains the necessary members
/// for validating and forwarding a webhook instead of just containing the definition of how to
/// derive these necessary members.
#[derive(Clone)]
pub struct IntegrationState {
    // How incoming requests for this integration are authenticated.
    pub verifier: Verifier,
    // Where verified requests are forwarded.
    pub output: Arc<Box<dyn ReceiverOutput>>,
    // Optional js transformation applied before forwarding.
    pub transformation: Option<TransformationConfig>,
}
/// The [`RequestFromParts`] is a structure consisting of all relevant parts of the HTTP request to
/// be validated by a [`Verifier`] implementor. This is to be immediately converted into the struct
/// [`SerializableRequest<Unvalidated>`] via its [`FromRequest`] implementation.
///
/// NOTE: This struct is never to be used directly unless by proxy of the aforementioned impl of
/// [`FromRequest`]. It's simply used as an easy way to implement [`FromRequest`] via a macro.
#[derive(Clone, Debug, FromRequest)]
pub struct RequestFromParts {
    // All request headers, as received.
    headers: HeaderMap,
    // The raw request body bytes.
    payload: Bytes,
}
/// A simple marker trait to denote the state of a [`SerializableRequest`]. The only way to publicly
/// construct any [`SerializableRequest<Validated>`]s is via the associated method on unvalidated
/// requests, [`SerializableRequest<Unvalidated>::validate`].
pub trait RequestState {}

/// Marker type: the request has not yet passed verification.
#[derive(Clone, Copy, Debug)]
pub struct Unvalidated;
impl RequestState for Unvalidated {}

/// Marker type: the request has passed verification.
#[derive(Clone, Copy, Debug)]
pub struct Validated;
impl RequestState for Validated {}
/// This intermediary representation is necessary because it is preferable to serialize the headers
/// and/or body as a [`String`] over bytes when dealing with some [`VerificationMethod`]s and some
/// [`ForwardingMethod`]s. This struct represents both the headers and body as enums which allow for
/// either textual representations or byte representations when [`Serialize`]d via [`serde`].
///
/// On trying to convert a [`Standard`] variant into a [`StringSerializable`] variant, HTTP headers
/// will be represented textually if and only if they are completely ASCII, while any bodies will
/// attempt to be read as UTF-8 before falling back to bytes.
///
/// NOTE: This conversion *should* be lazy. The [`String`] variants are only acceptable in a subset
/// of all cases, so lazy-conversion will prevent needless conversion back and forth. You may check
/// whether the conversion is required and/or helpful with [`VerificationMethod::want_string_rep`]
/// or [`VerificationMethod::need_string_rep`] plus the [`ForwardingMethod`] equivalents.
///
/// The intended course of action is to attempt to convert to string-serializable variants of the
/// header map and the body immediately if either of the aforementioned methods are true -- but
/// only returning an [`Err`] response if it *needs* it. Then, if the validation is a success (see
/// [`SerializableRequest<Unvalidated>::validate`]) and a validated equivalent is returned, then the
/// same checks are to be performed, but with the [`ForwardingMethod`] methods before being sent to
/// the appropriate [`ForwardingMethod`] implementor.
#[derive(Clone, Debug, Serialize)]
pub struct SerializableRequest<S: RequestState> {
    headers: SerializableHeaderMap,
    payload: SerializablePayload,
    // Tracks the validation state at the type level only; never serialized.
    #[serde(skip)]
    _pd: PhantomData<S>,
}

impl<S: RequestState> SerializableRequest<S> {
    /// Read-only access to the request headers.
    pub fn headers(&self) -> &SerializableHeaderMap {
        &self.headers
    }

    /// Read-only access to the request payload.
    pub fn payload(&self) -> &SerializablePayload {
        &self.payload
    }
}
impl From<RequestFromParts> for SerializableRequest<Unvalidated> {
fn from(value: RequestFromParts) -> Self {
Self {
headers: SerializableHeaderMap::Standard(value.headers),
payload: SerializablePayload::Standard(value.payload.to_vec()),
_pd: PhantomData,
}
}
}
#[async_trait]
impl<S, B> FromRequest<S, B> for SerializableRequest<Unvalidated>
where
    S: Send + Sync,
    B: HttpBody + Send + Sync + 'static,
    B::Data: Send,
    B::Error: Into<BoxError>,
{
    // Rejection is delegated to the derived extractor on `RequestFromParts`.
    type Rejection = <RequestFromParts as FromRequest<S, B>>::Rejection;

    // Extract headers + body via `RequestFromParts`, then wrap them in an
    // unvalidated `SerializableRequest`.
    async fn from_request(req: Request<B>, state: &S) -> Result<Self, Self::Rejection> {
        RequestFromParts::from_request(req, state)
            .await
            .map(Into::into)
    }
}
impl SerializableRequest<Unvalidated> {
    /// Runs `verifier` against this request, returning the validated request on
    /// success and an HTTP status code describing the failure otherwise.
    ///
    /// Before validation, the headers and payload are converted to their string
    /// representations when the verifier wants or needs them.
    pub async fn validate<V: VerificationMethod>(
        mut self,
        verifier: &V,
    ) -> Result<SerializableRequest<Validated>, http::StatusCode> {
        match (verifier.want_string_rep(), verifier.need_string_rep()) {
            // Needed: a failed conversion is a hard error.
            (_, true) => {
                self.headers = self
                    .headers
                    .try_to_string()
                    .map_err(|_| http::StatusCode::BAD_REQUEST)?;
                self.payload = self
                    .payload
                    .try_to_string()
                    .map_err(|_| http::StatusCode::BAD_REQUEST)?;
            }
            // Wanted, but not needed: fall back to the original representation.
            (true, false) => {
                self.headers = self.headers.try_to_string().unwrap_or_else(|h| h);
                self.payload = self.payload.try_to_string().unwrap_or_else(|p| p);
            }
            // Not wanted at all: leave everything as-is.
            (false, false) => {}
        }

        // FIXME: No cloning
        // Then actually use the [`VerificationMethod`] implementor.
        match verifier.validate(self.clone()).await {
            Ok(true) => Ok(SerializableRequest {
                headers: self.headers,
                payload: self.payload,
                _pd: PhantomData,
            }),
            // FIXME: Read config to know whether to log
            Ok(false) => Err(http::StatusCode::BAD_REQUEST),
            Err(e) => {
                tracing::error!("Error validating request: {}", e);
                Err(http::StatusCode::INTERNAL_SERVER_ERROR)
            }
        }
    }
}
/// A header map that can be serialized either as raw bytes ([`Self::Standard`])
/// or, when every value is valid text, as strings ([`Self::StringSerializable`]).
#[derive(Clone, Debug)]
pub enum SerializableHeaderMap {
    Standard(HeaderMap),
    StringSerializable(HashMap<String, String>),
}
impl<'a> IntoIterator for &'a SerializableHeaderMap {
    // Uniform (name, raw-value-bytes) view over both representations.
    type Item = (&'a str, &'a [u8]);
    type IntoIter = SerializableHeaderMapIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            SerializableHeaderMap::Standard(hm) => SerializableHeaderMapIter::HeaderMap(hm.iter()),
            SerializableHeaderMap::StringSerializable(hm) => {
                SerializableHeaderMapIter::HashMap(hm.iter())
            }
        }
    }
}
impl SerializableHeaderMap {
    /// Attempts to convert to the [`Self::StringSerializable`] representation.
    ///
    /// On failure (a header value that is not valid text) the original map is
    /// handed back unchanged in the `Err` variant so callers can fall back to
    /// the byte representation.
    pub fn try_to_string(self) -> Result<Self, Self> {
        match self {
            Self::Standard(header_map) => Ok(Self::StringSerializable(
                header_map
                    .iter()
                    .map(|(name, value)| Ok((name.as_str().to_owned(), value.to_str()?.to_owned())))
                    .collect::<Result<HashMap<String, String>>>()
                    .map_err(|_| Self::Standard(header_map))?,
            )),
            // Already string-serializable; nothing to do.
            Self::StringSerializable(hash_map) => Ok(Self::StringSerializable(hash_map)),
        }
    }

    /// Number of headers in the map, regardless of representation.
    pub fn len(&self) -> usize {
        match self {
            Self::Standard(m) => m.len(),
            Self::StringSerializable(m) => m.len(),
        }
    }

    /// True when the map contains no headers. Companion to [`Self::len`],
    /// added per convention (clippy's `len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
/// Serialize is not implemented on [`HeaderMap`]s themselves, so custom serialization is required.
impl Serialize for SerializableHeaderMap {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            // Byte representation: serialize as name -> raw value bytes.
            Self::Standard(header_map) => header_map
                .iter()
                .map(|(name, value)| (name.as_str().to_owned(), value.as_bytes().to_vec()))
                .collect::<HashMap<String, Vec<u8>>>()
                .serialize(serializer),
            // String representation serializes as a plain string map.
            Self::StringSerializable(hash_map) => hash_map.serialize(serializer),
        }
    }
}
/// Iterator yielding `(header name, value bytes)` pairs for either
/// representation of a [`SerializableHeaderMap`].
pub enum SerializableHeaderMapIter<'a> {
    HeaderMap(http::header::Iter<'a, HeaderValue>),
    HashMap(std::collections::hash_map::Iter<'a, String, String>),
}

impl<'a> Iterator for SerializableHeaderMapIter<'a> {
    type Item = (&'a str, &'a [u8]);

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            Self::HeaderMap(hm) => hm.next().map(|(k, v)| (k.as_str(), v.as_bytes())),
            Self::HashMap(hm) => hm.next().map(|(k, v)| (k.as_str(), v.as_bytes())),
        }
    }
}
/// A request body that can be serialized either as raw bytes or, when it is
/// valid UTF-8, as a string. `untagged` so it serializes as the inner value.
#[derive(Clone, Debug, Serialize)]
#[serde(untagged)]
pub enum SerializablePayload {
    Standard(Vec<u8>),
    StringSerializable(String),
}
impl SerializablePayload {
    /// Attempts to convert to the [`Self::StringSerializable`] representation.
    ///
    /// On invalid UTF-8 the original bytes are handed back unchanged in the
    /// `Err` variant so callers can fall back to the byte representation.
    fn try_to_string(self) -> Result<Self, Self> {
        match self {
            Self::Standard(v) => Ok(Self::StringSerializable(
                String::from_utf8(v).map_err(|e| Self::Standard(e.into_bytes()))?,
            )),
            Self::StringSerializable(s) => Ok(Self::StringSerializable(s)),
        }
    }

    /// Parses the payload as JSON, regardless of representation.
    pub fn as_json(&self) -> Result<serde_json::Value> {
        Ok(match self {
            Self::Standard(v) => serde_json::from_slice(v)?,
            Self::StringSerializable(s) => serde_json::from_str(s)?,
        })
    }

    /// Returns the payload as an owned `String`, erroring on invalid UTF-8.
    pub fn as_string(&self) -> Result<String> {
        match self {
            // Validate first, then allocate: avoids cloning the whole byte
            // buffer only to discard the copy when it is not valid UTF-8.
            Self::Standard(v) => Ok(std::str::from_utf8(v)?.to_owned()),
            Self::StringSerializable(s) => Ok(s.clone()),
        }
    }
}

View File

@@ -0,0 +1,128 @@
use std::sync::Arc;
use anyhow::Result;
use axum::async_trait;
use enum_dispatch::enum_dispatch;
use svix_bridge_types::svix::webhooks::Webhook;
use super::types::{SerializableHeaderMap, SerializablePayload, SerializableRequest, Unvalidated};
/// Strategy for deciding whether an incoming request is authentic.
#[async_trait]
#[enum_dispatch]
pub trait VerificationMethod {
    /// Returns `Ok(true)` if the request is valid, `Ok(false)` if it is not,
    /// and `Err(_)` if the check itself could not be performed.
    async fn validate(&self, req: SerializableRequest<Unvalidated>) -> Result<bool>;

    /// Whether this method prefers the string representation of headers/payload.
    fn want_string_rep(&self) -> bool {
        false
    }

    /// Whether this method requires the string representation of headers/payload.
    fn need_string_rep(&self) -> bool {
        false
    }
}
/// Verifies requests using svix webhook signatures.
#[derive(Clone)]
pub struct SvixVerifier {
    // Shared so the verifier can be cloned cheaply per request.
    webhook: Arc<Webhook>,
}

impl SvixVerifier {
    pub fn new(webhook: Arc<Webhook>) -> Self {
        Self { webhook }
    }
}

impl std::fmt::Debug for SvixVerifier {
    // Hand-written Debug that prints no fields (avoids requiring or exposing
    // the inner `Webhook`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SvixVerifier").finish()
    }
}
#[async_trait]
impl VerificationMethod for SvixVerifier {
    /// This [`VerificationMethod::validate`] implementation *requires* that the headers *and* payload
    /// be in their byte representations due to the requirements of the [`svix`] library. Please lazily
    /// convert these values such as to avoid pointless back-and-forth conversions.
    async fn validate(&self, req: SerializableRequest<Unvalidated>) -> Result<bool> {
        let headers = req.headers();
        let payload = req.payload();
        match (headers, payload) {
            (SerializableHeaderMap::Standard(headers), SerializablePayload::Standard(payload)) => {
                // `verify` checks the svix signature headers against the raw
                // body; collapse the if/else-returning-bool into `is_ok()`.
                Ok(self.webhook.verify(payload, headers).is_ok())
            }
            _ => {
                anyhow::bail!("`SvixVerifier::validate` given string representations")
            }
        }
    }
}
/// A no-op verifier: accepts every request unconditionally.
#[derive(Clone, Copy, Debug)]
pub struct NoVerifier;

#[async_trait]
impl VerificationMethod for NoVerifier {
    async fn validate(&self, _req: SerializableRequest<Unvalidated>) -> Result<bool> {
        Ok(true)
    }
}
// Allowed due to restrictions by [`enum_dispatch`] on variant names matching the structure names
#[allow(clippy::enum_variant_names)]
#[enum_dispatch(VerificationMethod)]
/// Closed set of verification strategies; `enum_dispatch` generates the
/// [`VerificationMethod`] forwarding impl and `From` conversions per variant.
#[derive(Clone, Debug)]
pub enum Verifier {
    SvixVerifier,
    NoVerifier,
}
#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use axum::extract::FromRequest;
    use svix_bridge_types::svix::webhooks::Webhook;

    use super::{super::types::SerializableRequest, SvixVerifier, VerificationMethod};

    /// Signs a payload with a known (test-only) secret, then checks that
    /// [`SvixVerifier`] accepts the signed message id and rejects another.
    #[tokio::test]
    async fn test_svix_verification() {
        let secret = "whsec_C2FVsBQIhrscChlQIMV+b5sSYspob7oD".to_owned();
        let webhook = Arc::new(Webhook::new(&secret).unwrap());
        let payload = "example payload".as_bytes();
        let timestamp = chrono::Utc::now().timestamp();
        let signature = webhook.sign("msg_valid", timestamp, payload).unwrap();
        let sv = SvixVerifier { webhook };

        // Request carrying the id that was actually signed: must validate.
        let req = http::request::Request::builder()
            .method("POST")
            .uri("test.uri")
            .header("svix-id", "msg_valid")
            .header("svix-signature", signature.clone())
            .header("svix-timestamp", &format!("{timestamp}"))
            .body(axum::body::Full::new(payload))
            .unwrap();
        let sr = SerializableRequest::from_request(req, &()).await.unwrap();
        assert!(sv.validate(sr).await.unwrap());

        // Same signature but a different message id: must be rejected.
        let req = http::request::Request::builder()
            .method("POST")
            .uri("test.uri")
            .header("svix-id", "msg_invalid")
            .header("svix-signature", signature)
            .header("svix-timestamp", &format!("{timestamp}"))
            .body(axum::body::Full::new(payload))
            .unwrap();
        let sr = SerializableRequest::from_request(req, &()).await.unwrap();
        assert!(!sv.validate(sr).await.unwrap());
    }
}

View File

@@ -0,0 +1,42 @@
# OpenAPI Generator Ignore
# Generated by openapi-generator https://github.com/openapitools/openapi-generator
# Use this file to prevent files from being overwritten by the generator.
# The patterns follow closely to .gitignore or .dockerignore.
# As an example, the C# client generator defines ApiClient.cs.
# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
#ApiClient.cs
# You can match any string of characters against a directory, file or extension with a single asterisk (*):
#foo/*/qux
# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
#foo/**/qux
# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
# You can also negate patterns with an exclamation (!).
# For example, you can ignore all files in a docs folder with the file extension .md:
#docs/*.md
# Then explicitly reverse the ignore rule for a single file:
#!docs/README.md
# Do not overwrite existing Visual Studio generated files
Svix.csproj
Svix.Tests.csproj
Svix.sln
# Do not overwrite readme
/README.md
# Prevent the generation of the following content
/appveyor.yml
/.gitignore
/git_push.sh
docs/
src/
Svix/Generated/OpenApi/Svix/Model/ApplicationPatch.cs
Svix/Generated/OpenApi/Svix/Model/EndpointPatch.cs
Svix/Generated/OpenApi/Svix/Model/EventTypePatch.cs

View File

@@ -0,0 +1,47 @@
using System;
using System.Net;
using System.Threading.Tasks;
using Moq;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
using Xunit;
namespace Svix.Tests
{
/// <summary>
/// Argument-validation tests for <see cref="SvixClient.Application"/>, wired up
/// against a mocked <see cref="IApplicationApi"/> so no network calls are made.
/// </summary>
public sealed class ApplicationTests
{
    private const string MOCK_TOKEN = ";iuani;ansd;ifgjbnai;sdjfgb";

    private readonly Mock<IApplicationApi> _mockApplicationApi;

    private readonly Mock<ISvixOptions> _mockOptions;

    private readonly SvixClient _svixClient;

    public ApplicationTests()
    {
        _mockApplicationApi = new Mock<IApplicationApi>();
        _mockOptions = new Mock<ISvixOptions>();
        _svixClient = new SvixClient(
            MOCK_TOKEN,
            _mockOptions.Object,
            applicationApi: _mockApplicationApi.Object);
    }

    [Fact]
    public void ApplicationCreate_WithoutApplication_ThrowsException()
    {
        Assert.Throws<ArgumentNullException>(() => _svixClient.Application.Create(null, null));
    }

    [Fact]
    public async Task ApplicationCreateAsync_WithoutApplication_ThrowsException()
    {
        // FIX: Assert.ThrowsAsync returns a Task that was never awaited, so this
        // test previously passed even if CreateAsync failed to throw. Awaiting it
        // (and making the test async Task) makes the assertion actually observed.
        await Assert.ThrowsAsync<ArgumentNullException>(() => _svixClient.Application.CreateAsync(null, null, null, default));
    }
}
}

View File

@@ -0,0 +1,27 @@
<!-- Test project for the Svix C# SDK: xUnit + Moq, with coverlet for coverage.
     Not packable - this assembly is never published to NuGet. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- NOTE(review): net5.0 is out of support; consider a supported LTS TFM. -->
<TargetFramework>net5.0</TargetFramework>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.9.4" />
<PackageReference Include="Moq" Version="4.16.1" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.3">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="3.0.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<!-- System under test. -->
<ProjectReference Include="..\Svix\Svix.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,25 @@
using Microsoft.Extensions.Logging.Abstractions;
using Svix.Models;
using Xunit;
namespace Svix.Tests
{
/// <summary>
/// Constructor smoke tests for <see cref="SvixClient"/>: the logger argument
/// is optional, and supplying one is also accepted.
/// </summary>
public class SvixClientTests
{
    [Fact]
    public void Constructor_WhenCalled_DoesNotNeedLogger()
    {
        // Construct without a logger; this must not throw.
        var client = new SvixClient("", new SvixOptions("http://some.url"));

        Assert.NotNull(client);
    }

    [Fact]
    public void Constructor_WhenCalled_AcceptsLogger()
    {
        // A no-op logger is enough to exercise the logger-accepting overload.
        var client = new SvixClient("", new SvixOptions("http://some.url"), new NullLogger<SvixClient>());

        Assert.NotNull(client);
    }
}
}

View File

@@ -0,0 +1,181 @@
using Xunit;
using System;
using System.Net;
using Svix.Exceptions;
namespace Svix.Tests
{
/// <summary>
/// Test fixture: builds a signed webhook payload (id, timestamp, headers,
/// signature) the way Svix would deliver it, so tests can verify it.
/// </summary>
class TestPayload
{
    internal const string SVIX_ID_HEADER_KEY = "svix-id";
    internal const string SVIX_SIGNATURE_HEADER_KEY = "svix-signature";
    internal const string SVIX_TIMESTAMP_HEADER_KEY = "svix-timestamp";

    private const string DEFAULT_MSG_ID = "msg_p5jXN8AQM9LWM0D4loKWxJek";
    private const string DEFAULT_PAYLOAD = "{\"test\": 2432232314}";
    private const string DEFAULT_SECRET = "MfKQ9r8GKYqrTwjUPD8ILPZIo2LaLaSw";

    public string id;
    public DateTimeOffset timestamp;
    public WebHeaderCollection headers;
    public string secret;
    public string payload;
    public string signature;

    public TestPayload(DateTimeOffset timestamp)
    {
        id = DEFAULT_MSG_ID;
        this.timestamp = timestamp;
        payload = DEFAULT_PAYLOAD;
        secret = DEFAULT_SECRET;

        var wh = new Webhook(secret);
        // FIX: the original declared a local `var signature` that shadowed the
        // public `signature` field, leaving the field permanently null.
        signature = wh.Sign(id, this.timestamp, payload);

        headers = new WebHeaderCollection();
        headers.Set(SVIX_ID_HEADER_KEY, id);
        headers.Set(SVIX_SIGNATURE_HEADER_KEY, signature);
        headers.Set(SVIX_TIMESTAMP_HEADER_KEY, timestamp.ToUnixTimeSeconds().ToString());
    }
}
/// <summary>
/// Signature-verification tests for <see cref="Webhook"/>: required headers,
/// timestamp tolerance, multi-signature headers, and deterministic signing.
/// </summary>
public class WebhookTests
{
    // Acceptance window used by the timestamp tests (five minutes).
    public const int TOLERANCE_IN_SECONDS = 5 * 60;

    [Fact]
    public void TestMissingIdRaisesException()
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);
        testPayload.headers.Remove(TestPayload.SVIX_ID_HEADER_KEY);

        var wh = new Webhook(testPayload.secret);

        Assert.Throws<WebhookVerificationException>(() => wh.Verify(testPayload.payload, testPayload.headers));
    }

    [Fact]
    public void TestMissingTimestampThrowsException()
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);
        testPayload.headers.Remove(TestPayload.SVIX_TIMESTAMP_HEADER_KEY);

        var wh = new Webhook(testPayload.secret);

        Assert.Throws<WebhookVerificationException>(() => wh.Verify(testPayload.payload, testPayload.headers));
    }

    [Fact]
    public void TestMissingSignatureThrowsException()
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);
        testPayload.headers.Remove(TestPayload.SVIX_SIGNATURE_HEADER_KEY);

        var wh = new Webhook(testPayload.secret);

        Assert.Throws<WebhookVerificationException>(() => wh.Verify(testPayload.payload, testPayload.headers));
    }

    [Fact]
    public void TestInvalidSignatureThrowsException()
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);
        testPayload.headers.Set(TestPayload.SVIX_SIGNATURE_HEADER_KEY, "v1,g0hM9SsE+OTPJTGt/tmIKtSyZlE3uFJELVlNIOLawdd");

        var wh = new Webhook(testPayload.secret);

        Assert.Throws<WebhookVerificationException>(() => wh.Verify(testPayload.payload, testPayload.headers));
    }

    [Fact]
    public void TestValidSignatureIsValid()
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);

        var wh = new Webhook(testPayload.secret);

        // Verify throws on failure, so not throwing is the assertion here.
        wh.Verify(testPayload.payload, testPayload.headers);
    }

    [Fact]
    public void TestUnbrandedSignatureIsValid()
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);

        // The same values must also verify under the unbranded webhook-* headers.
        WebHeaderCollection unbrandedHeaders = new WebHeaderCollection();
        unbrandedHeaders.Set("webhook-id", testPayload.headers.Get(TestPayload.SVIX_ID_HEADER_KEY));
        unbrandedHeaders.Set("webhook-signature", testPayload.headers.Get(TestPayload.SVIX_SIGNATURE_HEADER_KEY));
        unbrandedHeaders.Set("webhook-timestamp", testPayload.headers.Get(TestPayload.SVIX_TIMESTAMP_HEADER_KEY));
        testPayload.headers = unbrandedHeaders;

        var wh = new Webhook(testPayload.secret);

        wh.Verify(testPayload.payload, testPayload.headers);
    }

    [Fact]
    public void TestOldTimestampThrowsException()
    {
        // One second beyond the backward tolerance must be rejected.
        var testPayload = new TestPayload(DateTimeOffset.UtcNow.AddSeconds(-1 * (TOLERANCE_IN_SECONDS + 1)));

        var wh = new Webhook(testPayload.secret);

        Assert.Throws<WebhookVerificationException>(() => wh.Verify(testPayload.payload, testPayload.headers));
    }

    [Fact]
    public void TestNewTimestampThrowsException()
    {
        // One second beyond the forward tolerance must be rejected.
        var testPayload = new TestPayload(DateTimeOffset.UtcNow.AddSeconds(TOLERANCE_IN_SECONDS + 1));

        var wh = new Webhook(testPayload.secret);

        Assert.Throws<WebhookVerificationException>(() => wh.Verify(testPayload.payload, testPayload.headers));
    }

    [Fact]
    public void TestMultiSigPayloadIsValid()
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);

        // A space-separated header may carry several signatures; one valid
        // signature among invalid/unknown-version ones must be enough.
        string[] sigs = new string[] {
            "v1,Ceo5qEr07ixe2NLpvHk3FH9bwy/WavXrAFQ/9tdO6mc=",
            "v2,Ceo5qEr07ixe2NLpvHk3FH9bwy/WavXrAFQ/9tdO6mc=",
            testPayload.headers.Get(TestPayload.SVIX_SIGNATURE_HEADER_KEY), // valid signature
            "v1,Ceo5qEr07ixe2NLpvHk3FH9bwy/WavXrAFQ/9tdO6mc=",
        };
        testPayload.headers.Set(TestPayload.SVIX_SIGNATURE_HEADER_KEY, String.Join(" ", sigs));

        var wh = new Webhook(testPayload.secret);

        wh.Verify(testPayload.payload, testPayload.headers);
    }

    [Fact]
    public void TestSignatureVerificationWorksWithoutPrefix() // FIX: method name typo ("Sivnature")
    {
        var testPayload = new TestPayload(DateTimeOffset.UtcNow);

        // Verification must work whether or not the secret carries the whsec_ prefix.
        var wh = new Webhook(testPayload.secret);
        wh.Verify(testPayload.payload, testPayload.headers);

        wh = new Webhook("whsec_" + testPayload.secret);
        wh.Verify(testPayload.payload, testPayload.headers);
    }

    [Fact]
    public void VerifyWebhookSignWorks() // FIX: PascalCase, matching the sibling tests
    {
        // Known-answer test: fixed key/id/timestamp/payload must yield this signature.
        var key = "whsec_MfKQ9r8GKYqrTwjUPD8ILPZIo2LaLaSw";
        var msgId = "msg_p5jXN8AQM9LWM0D4loKWxJek";
        var timestamp = DateTimeOffset.FromUnixTimeSeconds(1614265330);
        var payload = "{\"test\": 2432232314}";
        var expected = "v1,g0hM9SsE+OTPJTGt/tmIKtSyZlE3uFJELVlNIOLJ1OE=";

        var wh = new Webhook(key);
        var signature = wh.Sign(msgId, timestamp, payload);

        // FIX: xUnit convention is Assert.Equal(expected, actual); the original
        // passed them reversed, which garbles the failure message.
        Assert.Equal(expected, signature);
    }
}
}

View File

@@ -0,0 +1,11 @@
<?php
// PrestaShop boilerplate "index.php": prevents directory listing/browsing by
// sending aggressive no-cache headers and redirecting any direct access to
// this directory one level up.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
// Second Cache-Control header is appended (replace=false) for old HTTP/1.0 proxies.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,41 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// CRUD operations for Svix applications, each in a synchronous and an
/// asynchronous variant; mutating calls accept an optional idempotency key.
/// </summary>
public interface IApplication
{
/// <summary>Creates an application (options may request get-if-exists behavior).</summary>
public ApplicationOut Create(ApplicationIn application, ApplicationCreateOptions options = null,
string idempotencyKey = default);
/// <summary>Async variant of <see cref="Create"/>.</summary>
public Task<ApplicationOut> CreateAsync(ApplicationIn application, ApplicationCreateOptions options = null,
string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Deletes the application with the given id; true on success.</summary>
bool Delete(string appId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Delete"/>.</summary>
Task<bool> DeleteAsync(string appId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Fetches a single application by id.</summary>
ApplicationOut Get(string appId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Get"/>.</summary>
Task<ApplicationOut> GetAsync(string appId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists applications, paginated via the iterator in <paramref name="options"/>.</summary>
ListResponseApplicationOut List(ListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="List"/>.</summary>
Task<ListResponseApplicationOut> ListAsync(ListOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Replaces an application (full update).</summary>
ApplicationOut Update(string appId, ApplicationIn application, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Update"/>.</summary>
Task<ApplicationOut> UpdateAsync(string appId, ApplicationIn application, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Partially updates an application (only supplied fields change).</summary>
ApplicationOut Patch(string appId, ApplicationPatch application, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Patch"/>.</summary>
Task<ApplicationOut> PatchAsync(string appId, ApplicationPatch application, string idempotencyKey = default,
CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,18 @@
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
namespace Svix.Abstractions
{
/// <summary>
/// Authentication operations: minting dashboard access tokens for an
/// application and revoking the current session.
/// </summary>
public interface IAuthentication
{
/// <summary>Gets a dashboard access token for the given application.</summary>
DashboardAccessOut GetDashboardAccess(string appId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="GetDashboardAccess"/>.</summary>
Task<DashboardAccessOut> GetDashboardAccessAsync(string appId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Invalidates the token used for this request; true on success.</summary>
bool Logout(string idempotencyKey = default);
/// <summary>Async variant of <see cref="Logout"/>.</summary>
Task<bool> LogoutAsync(string idempotencyKey = default, CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,20 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// Read-only access to server-side background tasks: fetch one by id or list
/// them with optional filtering/pagination.
/// </summary>
public interface IBackgroundTask
{
/// <summary>Fetches a single background task by id.</summary>
BackgroundTaskOut Get(string taskId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Get"/>.</summary>
Task<BackgroundTaskOut> GetAsync(string taskId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists background tasks, filtered/paginated via <paramref name="options"/>.</summary>
ListResponseBackgroundTaskOut List(BackgroundTaskListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="List"/>.</summary>
Task<ListResponseBackgroundTaskOut> ListAsync(BackgroundTaskListOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,76 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// Operations on an application's endpoints: CRUD, custom headers, signing
/// secret management, delivery recovery, and statistics. All calls are scoped
/// by application id, each with a synchronous and an asynchronous variant.
/// </summary>
public interface IEndpoint
{
/// <summary>Creates an endpoint under the given application.</summary>
EndpointOut Create(string appId, EndpointIn endpoint, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Create"/>.</summary>
Task<EndpointOut> CreateAsync(string appId, EndpointIn endpoint, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Deletes an endpoint; true on success.</summary>
bool Delete(string appId, string endpointId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Delete"/>.</summary>
Task<bool> DeleteAsync(string appId, string endpointId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Fetches a single endpoint.</summary>
EndpointOut Get(string appId, string endpointId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Get"/>.</summary>
Task<EndpointOut> GetAsync(string appId, string endpointId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Gets the custom headers sent with this endpoint's deliveries.</summary>
EndpointHeadersOut GetHeaders(string appId, string endpointId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="GetHeaders"/>.</summary>
Task<EndpointHeadersOut> GetHeadersAsync(string appId, string endpointId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Gets the endpoint's current signing secret.</summary>
string GetSecret(string appId, string endpointId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="GetSecret"/>.</summary>
Task<string> GetSecretAsync(string appId, string endpointId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists the application's endpoints.</summary>
ListResponseEndpointOut List(string appId, ListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="List"/>.</summary>
Task<ListResponseEndpointOut> ListAsync(string appId, ListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Partially updates the endpoint's custom headers; true on success.</summary>
bool PatchHeaders(string appId, string endpointId, EndpointHeadersPatchIn headers, string idempotencyKey = default);
/// <summary>Async variant of <see cref="PatchHeaders"/>.</summary>
Task<bool> PatchHeadersAsync(string appId, string endpointId, EndpointHeadersPatchIn headers,
string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Resends failed messages to the endpoint from a point in time; true on success.</summary>
bool Recover(string appId, string endpointId, RecoverIn recover, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Recover"/>.</summary>
Task<bool> RecoverAsync(string appId, string endpointId, RecoverIn recover, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Rotates the endpoint's signing secret; true on success.</summary>
bool RotateSecret(string appId, string endpointId, EndpointSecretRotateIn secret, string idempotencyKey = default);
/// <summary>Async variant of <see cref="RotateSecret"/>.</summary>
Task<bool> RotateSecretAsync(string appId, string endpointId, EndpointSecretRotateIn secret, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Replaces an endpoint (full update).</summary>
EndpointOut Update(string appId, string endpointId, EndpointUpdate endpoint, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Update"/>.</summary>
Task<EndpointOut> UpdateAsync(string appId, string endpointId, EndpointUpdate endpoint, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Partially updates an endpoint (only supplied fields change).</summary>
EndpointOut Patch(string appId, string endpointId, EndpointPatch endpoint, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Patch"/>.</summary>
Task<EndpointOut> PatchAsync(string appId, string endpointId, EndpointPatch endpoint, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Replaces the endpoint's custom headers; true on success.</summary>
bool UpdateHeaders(string appId, string endpointId, EndpointHeadersIn headers, string idempotencyKey = default);
/// <summary>Async variant of <see cref="UpdateHeaders"/>.</summary>
Task<bool> UpdateHeadersAsync(string appId, string endpointId, EndpointHeadersIn headers,
string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Gets delivery statistics for the endpoint.</summary>
EndpointStats GetStats(string appId, string endpointId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="GetStats"/>.</summary>
Task<EndpointStats> GetStatsAsync(string appId, string endpointId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,41 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// Operations on event types: create, read, list, update, patch, and archive
/// (with optional expunge), each in sync and async variants.
/// </summary>
public interface IEventType
{
/// <summary>Archives an event type; expunge=true also deletes its data. True on success.</summary>
bool Archive(string eventType, bool? expunge = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Archive"/>.</summary>
Task<bool> ArchiveAsync(string eventType, bool? expunge = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Creates a new event type.</summary>
EventTypeOut Create(EventTypeIn eventType, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Create"/>.</summary>
Task<EventTypeOut> CreateAsync(EventTypeIn eventType, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Fetches a single event type by name.</summary>
EventTypeOut Get(string eventType, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Get"/>.</summary>
Task<EventTypeOut> GetAsync(string eventType, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists event types, filtered/paginated via <paramref name="options"/>.</summary>
ListResponseEventTypeOut List(EventTypeListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="List"/>.</summary>
Task<ListResponseEventTypeOut> ListAsync(EventTypeListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Replaces an event type (full update).</summary>
EventTypeOut Update(string eventType, EventTypeUpdate update, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Update"/>.</summary>
Task<EventTypeOut> UpdateAsync(string eventType, EventTypeUpdate update, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Partially updates an event type (only supplied fields change).</summary>
EventTypeOut Patch(string eventType, EventTypePatch update, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Patch"/>.</summary>
Task<EventTypeOut> PatchAsync(string eventType, EventTypePatch update, string idempotencyKey = default,
CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,12 @@
using System.Threading;
using System.Threading.Tasks;
namespace Svix.Abstractions
{
/// <summary>
/// Health-check probe against the Svix server.
/// </summary>
public interface IHealth
{
/// <summary>True when the server reports itself healthy.</summary>
bool IsHealthy(string idempotencyKey = default);
/// <summary>Async variant of <see cref="IsHealthy"/>.</summary>
Task<bool> IsHealthyAsync(string idempotencyKey = default, CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,44 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// Operations on an application's integrations: CRUD plus API-key retrieval
/// and rotation. All calls are scoped by application id.
/// </summary>
public interface IIntegration
{
/// <summary>Creates an integration under the given application.</summary>
IntegrationOut Create(string appId, IntegrationIn integration, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Create"/>.</summary>
Task<IntegrationOut> CreateAsync(string appId, IntegrationIn integration, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Deletes an integration; true on success.</summary>
bool Delete(string appId, string integrationId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Delete"/>.</summary>
Task<bool> DeleteAsync(string appId, string integrationId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Fetches a single integration.</summary>
IntegrationOut Get(string appId, string integrationId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Get"/>.</summary>
Task<IntegrationOut> GetAsync(string appId, string integrationId, string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Gets the integration's current API key.</summary>
string GetKey(string appId, string integrationId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="GetKey"/>.</summary>
Task<string> GetKeyAsync(string appId, string integrationId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists the application's integrations.</summary>
ListResponseIntegrationOut List(string appId, ListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="List"/>.</summary>
Task<ListResponseIntegrationOut> ListAsync(string appId, ListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Rotates the integration's API key and returns the new key.</summary>
string RotateKey(string appId, string integrationId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="RotateKey"/>.</summary>
Task<string> RotateKeyAsync(string appId, string integrationId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Replaces an integration (full update).</summary>
IntegrationOut Update(string appId, string integrationId, IntegrationUpdate integration, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Update"/>.</summary>
Task<IntegrationOut> UpdateAsync(string appId, string integrationId, IntegrationUpdate integration, string idempotencyKey = default, CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,27 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// Operations on an application's messages: send (create), fetch, and list.
/// </summary>
public interface IMessage
{
/// <summary>Sends a message to the given application's endpoints.</summary>
MessageOut Create(string appId, MessageIn message, MessageCreateOptions options = null,
string idempotencyKey = default);
/// <summary>Async variant of <see cref="Create"/>.</summary>
Task<MessageOut> CreateAsync(string appId, MessageIn message, MessageCreateOptions options = null,
string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Fetches a single message by id.</summary>
MessageOut Get(string appId, string messageId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="Get"/>.</summary>
Task<MessageOut> GetAsync(string appId, string messageId, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists the application's messages, filtered/paginated via <paramref name="options"/>.</summary>
ListResponseMessageOut List(string appId, MessageListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="List"/>.</summary>
Task<ListResponseMessageOut> ListAsync(string appId, MessageListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,55 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// Read operations over message delivery attempts (by message, by endpoint,
/// or individually), plus manual resend of a message to an endpoint.
/// </summary>
public interface IMessageAttempt
{
/// <summary>Fetches a single delivery attempt.</summary>
MessageAttemptOut GetAttempt(string appId, string attemptId, string messageId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="GetAttempt"/>.</summary>
Task<MessageAttemptOut> GetAttemptAsync(string appId, string attemptId, string messageId, string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Lists messages that were attempted against an endpoint.</summary>
ListResponseEndpointMessageOut ListAttemptedMessages(string appId, string endpointId, MessageAttemptListOptions options = null,
string idempotencyKey = default);
/// <summary>Async variant of <see cref="ListAttemptedMessages"/>.</summary>
Task<ListResponseEndpointMessageOut> ListAttemptedMessagesAsync(string appId, string endpointId, MessageAttemptListOptions options = null,
string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Lists attempts made against a specific endpoint.</summary>
ListResponseMessageAttemptOut ListAttemptsByEndpoint(string appId, string endpointId, AttemptsByEndpointListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="ListAttemptsByEndpoint"/>.</summary>
Task<ListResponseMessageAttemptOut> ListAttemptsByEndpointAsync(string appId, string endpointId, AttemptsByEndpointListOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Lists attempts made for a specific message.</summary>
ListResponseMessageAttemptOut ListAttemptsByMessage(string appId, string messageId, AttemptsByMessageListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="ListAttemptsByMessage"/>.</summary>
Task<ListResponseMessageAttemptOut> ListAttemptsByMessageAsync(string appId, string messageId, AttemptsByMessageListOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default);
/// <summary>Lists attempts of one message against one endpoint.</summary>
ListResponseMessageAttemptEndpointOut ListAttemptsForEndpoint(string appId, string messageId, string endpointId,
AttemptsByEndpointListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="ListAttemptsForEndpoint"/>.</summary>
Task<ListResponseMessageAttemptEndpointOut> ListAttemptsForEndpointAsync(string appId, string messageId, string endpointId,
AttemptsByEndpointListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists all attempts for a message.</summary>
ListResponseMessageAttemptOut ListAttempts(string appId, string messageId,
MessageAttemptListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="ListAttempts"/>.</summary>
Task<ListResponseMessageAttemptOut> ListAttemptsAsync(string appId, string messageId,
MessageAttemptListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Lists the endpoints a message was attempted against.</summary>
ListResponseMessageEndpointOut ListAttemptedDestinations(string appId, string messageId,
ListOptions options = null, string idempotencyKey = default);
/// <summary>Async variant of <see cref="ListAttemptedDestinations"/>.</summary>
Task<ListResponseMessageEndpointOut> ListAttemptedDestinationsAsync(string appId, string messageId,
ListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default);
/// <summary>Requeues a message for delivery to an endpoint; true on success.</summary>
bool ResendWebhook(string appId, string messageId, string endpointId, string idempotencyKey = default);
/// <summary>Async variant of <see cref="ResendWebhook"/>.</summary>
Task<bool> ResendWebhookAsync(string appId, string messageId, string endpointId,
string idempotencyKey = default, CancellationToken cancellationToken = default);
}
}

View File

@@ -0,0 +1,20 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Svix.Model;
using Svix.Models;
namespace Svix.Abstractions
{
/// <summary>
/// Aggregated usage statistics: per-application usage and event-type rollups.
/// </summary>
// NOTE(review): unlike the sibling interfaces, the *Async members here take no
// CancellationToken — confirm whether that omission is intentional before
// changing the interface (adding one would break existing implementers).
public interface IStatistics
{
/// <summary>Kicks off aggregation of per-application usage statistics.</summary>
AppUsageStatsOut AggregateAppStats(AppUsageStatsIn appUsageStatsIn, string idempotencyKey = default);
/// <summary>Async variant of <see cref="AggregateAppStats"/>.</summary>
Task<AppUsageStatsOut> AggregateAppStatsAsync(AppUsageStatsIn appUsageStatsIn, string idempotencyKey = default);
/// <summary>Kicks off aggregation of event types into a rollup.</summary>
AggregateEventTypesOut AggregateEventTypes();
/// <summary>Async variant of <see cref="AggregateEventTypes"/>.</summary>
Task<AggregateEventTypesOut> AggregateEventTypesAsync();
}
}

View File

@@ -0,0 +1,29 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace Svix.Abstractions
{
/// <summary>
/// Root client contract: one property per API resource, plus the logger and
/// the error policy (Throw = rethrow API failures instead of returning fallbacks).
/// </summary>
public interface ISvixClient
{
public IApplication Application { get; }
public IAuthentication Authentication { get; }
public IEndpoint Endpoint { get; }
public IEventType EventType { get; }
public IIntegration Integration { get; }
public IMessage Message { get; }
public IMessageAttempt MessageAttempt { get; }
public IHealth Health { get; }
/// <summary>Logger shared with the resource wrappers.</summary>
public ILogger Logger { get; }
/// <summary>When true, API failures are rethrown; otherwise fallbacks are returned.</summary>
public bool Throw { get; }
}
}

View File

@@ -0,0 +1,9 @@
namespace Svix.Abstractions
{
/// <summary>
/// Client configuration: the Svix server base URL and the error policy.
/// </summary>
public interface ISvixOptions
{
/// <summary>Base URL of the Svix server to call.</summary>
public string ServerUrl { get; }
/// <summary>When true, API failures are rethrown to the caller.</summary>
public bool Throw { get; }
}
}

View File

@@ -0,0 +1,24 @@
using System;
using Microsoft.Extensions.Logging;
namespace Svix.Abstractions
{
/// <summary>
/// Base class for API resource wrappers; stores the owning client and exposes
/// its logger and throw-on-error policy to derived resources.
/// </summary>
public abstract class SvixResourceBase
{
// NOTE(review): this private parameterless ctor is unreachable (an abstract
// class's ctors are only callable from subclasses, which cannot see a private
// one) and would leave SvixClient null — looks like dead code; confirm before
// removing.
private SvixResourceBase()
{
// empty
}
// Logger comes from the client validated in the protected constructor.
protected ILogger Logger => SvixClient.Logger;
// Owning client; set exactly once in the constructor below.
protected readonly ISvixClient SvixClient;
// False when the client is somehow null; otherwise mirrors the client's policy.
protected bool Throw => SvixClient?.Throw ?? false;
/// <summary>Creates the resource bound to a non-null owning client.</summary>
/// <exception cref="ArgumentNullException">When <paramref name="svixClient"/> is null.</exception>
protected SvixResourceBase(ISvixClient svixClient)
{
SvixClient = svixClient ?? throw new ArgumentNullException(nameof(svixClient));
}
}
}

View File

@@ -0,0 +1,11 @@
<?php
// PrestaShop boilerplate "index.php": prevents directory listing/browsing by
// sending aggressive no-cache headers and redirecting any direct access to
// this directory one level up.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
// Second Cache-Control header is appended (replace=false) for old HTTP/1.0 proxies.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,283 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
namespace Svix
{
/// <summary>
/// Application API resource. Every operation delegates to the generated
/// <see cref="IApplicationApi"/>, logs failures, and — depending on the owning
/// client's Throw policy — either rethrows or returns a fallback value
/// (null for single objects, false for deletes, an empty list response for lists).
/// </summary>
public sealed class Application : SvixResourceBase, IApplication
{
    private readonly IApplicationApi _applicationApi;

    public Application(ISvixClient svixClient, IApplicationApi applicationApi)
        : base(svixClient)
    {
        _applicationApi = applicationApi ?? throw new ArgumentNullException(nameof(applicationApi));
    }

    /// <summary>Creates an application (get-if-exists when options request it).</summary>
    /// <exception cref="ArgumentNullException">When <paramref name="application"/> is null (always thrown).</exception>
    public ApplicationOut Create(ApplicationIn application, ApplicationCreateOptions options = null, string idempotencyKey = default)
    {
        try
        {
            application = application ?? throw new ArgumentNullException(nameof(application));

            return _applicationApi.V1ApplicationCreate(
                application,
                options?.GetIfExists ?? false,
                idempotencyKey);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Create)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Async variant of <see cref="Create"/>.</summary>
    public async Task<ApplicationOut> CreateAsync(ApplicationIn application, ApplicationCreateOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            application = application ?? throw new ArgumentNullException(nameof(application));

            return await _applicationApi.V1ApplicationCreateAsync(
                application,
                options?.GetIfExists ?? false,
                idempotencyKey,
                cancellationToken);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(CreateAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Deletes an application; true when the API returns 204 No Content.</summary>
    // NOTE(review): idempotencyKey is accepted but never forwarded — confirm
    // whether the generated V1ApplicationDelete supports it.
    public bool Delete(string appId, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _applicationApi.V1ApplicationDeleteWithHttpInfo(appId);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Delete)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>Async variant of <see cref="Delete"/>.</summary>
    public async Task<bool> DeleteAsync(string appId, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _applicationApi.V1ApplicationDeleteWithHttpInfoAsync(
                appId,
                cancellationToken);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(DeleteAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>Fetches a single application by id; null on failure when not throwing.</summary>
    public ApplicationOut Get(string appId, string idempotencyKey = default)
    {
        try
        {
            return _applicationApi.V1ApplicationGet(appId);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Get)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Async variant of <see cref="Get"/>.</summary>
    public async Task<ApplicationOut> GetAsync(string appId, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            // FIX: forward the caller's token — it was previously dropped, so the
            // request could never be cancelled (all sibling *Async methods pass it).
            // Confirm the generated V1ApplicationGetAsync overload accepts it.
            return await _applicationApi.V1ApplicationGetAsync(appId, cancellationToken);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Lists applications; an empty response on failure when not throwing.</summary>
    public ListResponseApplicationOut List(ListOptions options = null, string idempotencyKey = default)
    {
        try
        {
            return _applicationApi.V1ApplicationList(
                options?.Limit,
                options?.Iterator,
                options?.Order);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(List)} failed");

            if (Throw)
                throw;

            return new ListResponseApplicationOut();
        }
    }

    /// <summary>Async variant of <see cref="List"/>.</summary>
    public async Task<ListResponseApplicationOut> ListAsync(ListOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            return await _applicationApi.V1ApplicationListAsync(
                options?.Limit,
                options?.Iterator,
                options?.Order,
                cancellationToken);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ListAsync)} failed");

            if (Throw)
                throw;

            return new ListResponseApplicationOut();
        }
    }

    /// <summary>Replaces an application (full update); null on failure when not throwing.</summary>
    public ApplicationOut Update(string appId, ApplicationIn application, string idempotencyKey = default)
    {
        try
        {
            return _applicationApi.V1ApplicationUpdate(
                appId,
                application);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Update)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Async variant of <see cref="Update"/>.</summary>
    public async Task<ApplicationOut> UpdateAsync(string appId, ApplicationIn application, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            return await _applicationApi.V1ApplicationUpdateAsync(
                appId,
                application,
                cancellationToken);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(UpdateAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Partially updates an application; null on failure when not throwing.</summary>
    public ApplicationOut Patch(string appId, ApplicationPatch application, string idempotencyKey = default)
    {
        try
        {
            return _applicationApi.V1ApplicationPatch(
                appId,
                application);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Patch)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Async variant of <see cref="Patch"/>.</summary>
    public async Task<ApplicationOut> PatchAsync(string appId, ApplicationPatch application, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            return await _applicationApi.V1ApplicationPatchAsync(
                appId,
                application,
                cancellationToken);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(PatchAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }
}
}

View File

@@ -0,0 +1,152 @@
using System;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
namespace Svix
{
/// <summary>
/// Authentication API resource: app-portal and dashboard access tokens, plus
/// logout. Failures are logged and either rethrown (when Throw is set on the
/// owning client) or converted to a fallback (null / false).
/// </summary>
public sealed class Authentication : SvixResourceBase, IAuthentication
{
    private readonly IAuthenticationApi _authenticationApi;

    public Authentication(ISvixClient svixClient, IAuthenticationApi authenticationApi)
        : base(svixClient)
    {
        _authenticationApi = authenticationApi ?? throw new ArgumentNullException(nameof(authenticationApi));
    }

    /// <summary>Gets an app-portal access token for the application.</summary>
    public AppPortalAccessOut GetAppPortalAccess(string appId, AppPortalAccessIn appPortalAccess, string idempotencyKey = default)
    {
        try
        {
            return _authenticationApi.V1AuthenticationAppPortalAccess(
                appId,
                appPortalAccess,
                idempotencyKey);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAppPortalAccess)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Async variant of <see cref="GetAppPortalAccess"/>.</summary>
    public async Task<AppPortalAccessOut> GetAppPortalAccessAsync(string appId, AppPortalAccessIn appPortalAccess,
        string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            // FIX: forward the caller's token — it was previously dropped
            // (LogoutAsync already forwards it). Confirm the generated overload.
            return await _authenticationApi.V1AuthenticationAppPortalAccessAsync(
                appId,
                appPortalAccess,
                idempotencyKey,
                cancellationToken);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAppPortalAccessAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Gets a dashboard access token for the application.</summary>
    public DashboardAccessOut GetDashboardAccess(string appId, string idempotencyKey = default)
    {
        try
        {
            return _authenticationApi.V1AuthenticationDashboardAccess(
                appId,
                idempotencyKey);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetDashboardAccess)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Async variant of <see cref="GetDashboardAccess"/>.</summary>
    public async Task<DashboardAccessOut> GetDashboardAccessAsync(string appId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            // FIX: forward the caller's token — it was previously dropped
            // (LogoutAsync already forwards it). Confirm the generated overload.
            return await _authenticationApi.V1AuthenticationDashboardAccessAsync(
                appId,
                idempotencyKey,
                cancellationToken);
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetDashboardAccessAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>Invalidates the current token; true when the API returns 204 No Content.</summary>
    public bool Logout(string idempotencyKey = default)
    {
        try
        {
            var lResult = _authenticationApi.V1AuthenticationLogoutWithHttpInfo(
                idempotencyKey);

            return lResult.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Logout)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>Async variant of <see cref="Logout"/>.</summary>
    public async Task<bool> LogoutAsync(string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lResult = await _authenticationApi.V1AuthenticationLogoutWithHttpInfoAsync(
                idempotencyKey,
                cancellationToken);

            return lResult.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(LogoutAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }
}
}

View File

@@ -0,0 +1,112 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
namespace Svix
{
/// <summary>
/// Wraps the generated background-task API (get one task, list tasks).
/// Failures are logged and either rethrown (when Throw is set) or mapped to
/// null / an empty list response.
/// </summary>
public sealed class BackgroundTask : SvixResourceBase, IBackgroundTask
{
    private readonly IBackgroundTasksApi _backgroundTaskApi;

    public BackgroundTask(ISvixClient svixClient, IBackgroundTasksApi backgroundTaskApi)
        : base(svixClient)
    {
        _backgroundTaskApi = backgroundTaskApi ?? throw new ArgumentNullException(nameof(backgroundTaskApi));
    }

    /// <summary>
    /// Gets a single background task by id. Returns null on failure when Throw is disabled.
    /// </summary>
    public BackgroundTaskOut Get(string taskId, string idempotencyKey = default)
    {
        try
        {
            var lBackgroundTask = _backgroundTaskApi.GetBackgroundTask(taskId);

            return lBackgroundTask;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Get)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Get"/>.
    /// </summary>
    public async Task<BackgroundTaskOut> GetAsync(string taskId, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            // Forward the caller's token (previously accepted but never passed through);
            // the generated client takes it as the trailing argument, as in ListBackgroundTasksAsync.
            var lBackgroundTask = await _backgroundTaskApi.GetBackgroundTaskAsync(taskId, cancellationToken);

            return lBackgroundTask;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Lists background tasks, optionally filtered/paged via <paramref name="options"/>.
    /// Returns an empty list response on failure when Throw is disabled.
    /// </summary>
    public ListResponseBackgroundTaskOut List(BackgroundTaskListOptions options = null, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _backgroundTaskApi.ListBackgroundTasks(
                options?.Status,
                options?.Task,
                options?.Limit,
                options?.Iterator,
                options?.Order);

            return lResponse;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(List)} failed");

            if (Throw)
                throw;

            return new ListResponseBackgroundTaskOut();
        }
    }

    /// <summary>
    /// Async variant of <see cref="List"/>.
    /// </summary>
    public async Task<ListResponseBackgroundTaskOut> ListAsync(BackgroundTaskListOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _backgroundTaskApi.ListBackgroundTasksAsync(
                options?.Status,
                options?.Task,
                options?.Limit,
                options?.Iterator,
                options?.Order,
                cancellationToken);

            return lResponse;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ListAsync)} failed");

            if (Throw)
                throw;

            return new ListResponseBackgroundTaskOut();
        }
    }
}
}

View File

@@ -0,0 +1,853 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
namespace Svix
{
/// <summary>
/// Wraps the generated endpoint API (CRUD, headers, secrets, stats, replay,
/// transformations, example messages). Failures are logged and either rethrown
/// (when Throw is set) or mapped to null / false / an empty list response.
/// </summary>
public sealed class Endpoint : SvixResourceBase, IEndpoint
{
    private readonly IEndpointApi _endpointApi;

    public Endpoint(ISvixClient svixClient, IEndpointApi endpoingApi)
        : base(svixClient)
    {
        // Report the offending parameter, not the field being assigned.
        _endpointApi = endpoingApi ?? throw new ArgumentNullException(nameof(endpoingApi));
    }

    /// <summary>
    /// Creates a new endpoint on the given application.
    /// </summary>
    public EndpointOut Create(string appId, EndpointIn endpoint, string idempotencyKey = default)
    {
        try
        {
            var lEndpoint = _endpointApi.V1EndpointCreate(
                appId,
                endpoint,
                idempotencyKey);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Create)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Create"/>.
    /// </summary>
    public async Task<EndpointOut> CreateAsync(string appId, EndpointIn endpoint, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lEndpoint = await _endpointApi.V1EndpointCreateAsync(
                appId,
                endpoint,
                idempotencyKey,
                cancellationToken);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(CreateAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Deletes an endpoint. True when the API answers 204 No Content.
    /// </summary>
    public bool Delete(string appId, string endpointId, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _endpointApi.V1EndpointDeleteWithHttpInfo(
                appId,
                endpointId);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Delete)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Delete"/>.
    /// </summary>
    public async Task<bool> DeleteAsync(string appId, string endpointId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _endpointApi.V1EndpointDeleteWithHttpInfoAsync(
                appId,
                endpointId,
                cancellationToken);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(DeleteAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Gets a single endpoint. Returns null on failure when Throw is disabled.
    /// </summary>
    public EndpointOut Get(string appId, string endpointId, string idempotencyKey = default)
    {
        try
        {
            var lEndpoint = _endpointApi.V1EndpointGet(
                appId,
                endpointId);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Get)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Get"/>.
    /// </summary>
    public async Task<EndpointOut> GetAsync(string appId, string endpointId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lEndpoint = await _endpointApi.V1EndpointGetAsync(
                appId,
                endpointId,
                cancellationToken);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Gets the custom headers configured on an endpoint.
    /// </summary>
    public EndpointHeadersOut GetHeaders(string appId, string endpointId, string idempotencyKey = default)
    {
        try
        {
            var lHeaders = _endpointApi.V1EndpointGetHeaders(
                appId,
                endpointId);

            return lHeaders;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetHeaders)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="GetHeaders"/>.
    /// </summary>
    public async Task<EndpointHeadersOut> GetHeadersAsync(string appId, string endpointId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lHeaders = await _endpointApi.V1EndpointGetHeadersAsync(
                appId,
                endpointId,
                cancellationToken);

            return lHeaders;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetHeadersAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Gets the endpoint's signing secret (the key string), or null on failure.
    /// </summary>
    public string GetSecret(string appId, string endpointId, string idempotencyKey = default)
    {
        try
        {
            var lSecret = _endpointApi.V1EndpointGetSecret(
                appId,
                endpointId);

            return lSecret?.Key;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetSecret)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="GetSecret"/>.
    /// </summary>
    public async Task<string> GetSecretAsync(string appId, string endpointId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lSecret = await _endpointApi.V1EndpointGetSecretAsync(
                appId,
                endpointId,
                cancellationToken);

            // Null-conditional for consistency with the sync variant: avoid an NRE
            // if the API ever yields a null body.
            return lSecret?.Key;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetSecretAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Lists the application's endpoints. Returns an empty list response on failure
    /// when Throw is disabled.
    /// </summary>
    public ListResponseEndpointOut List(string appId, ListOptions options = null, string idempotencyKey = default)
    {
        try
        {
            var lEndpoints = _endpointApi.V1EndpointList(
                appId,
                options?.Limit,
                options?.Iterator,
                options?.Order);

            return lEndpoints;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(List)} failed");

            if (Throw)
                throw;

            return new ListResponseEndpointOut();
        }
    }

    /// <summary>
    /// Async variant of <see cref="List"/>.
    /// </summary>
    public async Task<ListResponseEndpointOut> ListAsync(string appId, ListOptions options = null, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lEndpoints = await _endpointApi.V1EndpointListAsync(
                appId,
                options?.Limit,
                options?.Iterator,
                options?.Order,
                cancellationToken);

            return lEndpoints;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ListAsync)} failed");

            if (Throw)
                throw;

            return new ListResponseEndpointOut();
        }
    }

    /// <summary>
    /// Partially updates the endpoint's custom headers. True on 204 No Content.
    /// </summary>
    public bool PatchHeaders(string appId, string endpointId, EndpointHeadersPatchIn headers, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _endpointApi.V1EndpointPatchHeadersWithHttpInfo(
                appId,
                endpointId,
                headers);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(PatchHeaders)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="PatchHeaders"/>.
    /// </summary>
    public async Task<bool> PatchHeadersAsync(string appId, string endpointId, EndpointHeadersPatchIn headers, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _endpointApi.V1EndpointPatchHeadersWithHttpInfoAsync(
                appId,
                endpointId,
                headers,
                cancellationToken);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(PatchHeadersAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Resends all failed messages since a given time. True on 202 Accepted.
    /// </summary>
    public bool Recover(string appId, string endpointId, RecoverIn recover, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _endpointApi.V1EndpointRecoverWithHttpInfo(
                appId,
                endpointId,
                recover,
                idempotencyKey);

            return lResponse.StatusCode == HttpStatusCode.Accepted;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Recover)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Recover"/>.
    /// </summary>
    public async Task<bool> RecoverAsync(string appId, string endpointId, RecoverIn recover, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _endpointApi.V1EndpointRecoverWithHttpInfoAsync(
                appId,
                endpointId,
                recover,
                idempotencyKey,
                cancellationToken);

            return lResponse.StatusCode == HttpStatusCode.Accepted;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(RecoverAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Rotates the endpoint's signing secret. True on 204 No Content.
    /// </summary>
    public bool RotateSecret(string appId, string endpointId, EndpointSecretRotateIn secret, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _endpointApi.V1EndpointRotateSecretWithHttpInfo(
                appId,
                endpointId,
                secret,
                idempotencyKey);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(RotateSecret)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="RotateSecret"/>.
    /// </summary>
    public async Task<bool> RotateSecretAsync(string appId, string endpointId, EndpointSecretRotateIn secret, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            // Fixed: arguments were previously passed as (endpointId, appId, ...),
            // i.e. swapped relative to the sync variant and every other call in this
            // class. Also forward the caller's cancellation token.
            var lResponse = await _endpointApi.V1EndpointRotateSecretWithHttpInfoAsync(
                appId,
                endpointId,
                secret,
                idempotencyKey,
                cancellationToken);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(RotateSecretAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Fully updates an endpoint.
    /// </summary>
    public EndpointOut Update(string appId, string endpointId, EndpointUpdate endpoint, string idempotencyKey = default)
    {
        try
        {
            var lEndpoint = _endpointApi.V1EndpointUpdate(
                appId,
                endpointId,
                endpoint);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Update)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Update"/>.
    /// </summary>
    public async Task<EndpointOut> UpdateAsync(string appId, string endpointId, EndpointUpdate endpoint, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lEndpoint = await _endpointApi.V1EndpointUpdateAsync(
                appId,
                endpointId,
                endpoint,
                cancellationToken);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(UpdateAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Partially updates an endpoint; null fields in <paramref name="endpoint"/> are ignored.
    /// </summary>
    public EndpointOut Patch(string appId, string endpointId, EndpointPatch endpoint, string idempotencyKey = default)
    {
        try
        {
            var lEndpoint = _endpointApi.V1EndpointPatch(
                appId,
                endpointId,
                endpoint);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Patch)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Patch"/>.
    /// </summary>
    public async Task<EndpointOut> PatchAsync(string appId, string endpointId, EndpointPatch endpoint, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lEndpoint = await _endpointApi.V1EndpointPatchAsync(
                appId,
                endpointId,
                endpoint,
                cancellationToken);

            return lEndpoint;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(PatchAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Replaces the endpoint's custom headers. True on 204 No Content.
    /// </summary>
    public bool UpdateHeaders(string appId, string endpointId, EndpointHeadersIn headers, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _endpointApi.V1EndpointUpdateHeadersWithHttpInfo(
                appId,
                endpointId,
                headers);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(UpdateHeaders)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="UpdateHeaders"/>.
    /// </summary>
    public async Task<bool> UpdateHeadersAsync(string appId, string endpointId, EndpointHeadersIn headers, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _endpointApi.V1EndpointUpdateHeadersWithHttpInfoAsync(
                appId,
                endpointId,
                headers,
                cancellationToken);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(UpdateHeadersAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Gets endpoint statistics over the full available window (no since/until filter).
    /// </summary>
    public EndpointStats GetStats(string appId, string endpointId, string idempotencyKey = default)
    {
        try
        {
            var lStats = _endpointApi.V1EndpointGetStats(
                appId,
                endpointId,
                null,
                null);

            return lStats;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetStats)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Gets endpoint statistics filtered by the optional Since/Until window in <paramref name="options"/>.
    /// </summary>
    public EndpointStats GetStatsWithOptions(string appId, string endpointId, EndpointStatsOptions options = null, string idempotencyKey = default)
    {
        try
        {
            var lStats = _endpointApi.V1EndpointGetStats(
                appId,
                endpointId,
                options?.Since,
                options?.Until);

            return lStats;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetStatsWithOptions)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="GetStats"/>.
    /// </summary>
    public async Task<EndpointStats> GetStatsAsync(string appId, string endpointId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lStats = await _endpointApi.V1EndpointGetStatsAsync(
                appId,
                endpointId,
                null,
                null,
                cancellationToken);

            return lStats;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetStatsAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="GetStatsWithOptions"/>.
    /// </summary>
    public async Task<EndpointStats> GetStatsWithOptionsAsync(string appId, string endpointId, EndpointStatsOptions options = null, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lStats = await _endpointApi.V1EndpointGetStatsAsync(
                appId,
                endpointId,
                options?.Since,
                options?.Until,
                cancellationToken);

            return lStats;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetStatsWithOptionsAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Replays messages the endpoint missed while disabled/failing. True on 202 Accepted.
    /// </summary>
    public bool ReplayMissing(string appId, string endpointId, ReplayIn replayIn,
        string idempotencyKey = default)
    {
        try
        {
            var response = _endpointApi.V1EndpointReplayWithHttpInfo(
                appId,
                endpointId,
                replayIn,
                idempotencyKey);

            return response.StatusCode == HttpStatusCode.Accepted;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ReplayMissing)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="ReplayMissing"/>.
    /// </summary>
    public async Task<bool> ReplayMissingAsync(string appId, string endpointId, ReplayIn replayIn,
        string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var response = await _endpointApi.V1EndpointReplayWithHttpInfoAsync(
                appId,
                endpointId,
                replayIn,
                idempotencyKey,
                cancellationToken);

            return response.StatusCode == HttpStatusCode.Accepted;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ReplayMissingAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Gets the endpoint's transformation code/state.
    /// </summary>
    public EndpointTransformationOut TransformationGet(string appId, string endpointId, string idempotencyKey = default)
    {
        try
        {
            var lTransformation = _endpointApi.V1EndpointTransformationGet(
                appId,
                endpointId);

            return lTransformation;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(TransformationGet)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="TransformationGet"/>.
    /// </summary>
    public async Task<EndpointTransformationOut> TransformationGetAsync(string appId, string endpointId, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lTransformation = await _endpointApi.V1EndpointTransformationGetAsync(
                appId,
                endpointId,
                cancellationToken);

            return lTransformation;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(TransformationGetAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Partially updates the endpoint's transformation. True on 204 No Content.
    /// </summary>
    public bool TransformationPartialUpdate(string appId, string endpointId, EndpointTransformationIn endpointTransformationIn, string idempotencyKey = default)
    {
        try
        {
            var response = _endpointApi.V1EndpointTransformationPartialUpdateWithHttpInfo(
                appId,
                endpointId,
                endpointTransformationIn);

            return response.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            // Fixed: previously logged nameof(TransformationGet) (copy-paste).
            Logger?.LogError(e, $"{nameof(TransformationPartialUpdate)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="TransformationPartialUpdate"/>.
    /// </summary>
    public async Task<bool> TransformationPartialUpdateAsync(string appId, string endpointId, EndpointTransformationIn endpointTransformationIn, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var response = await _endpointApi.V1EndpointTransformationPartialUpdateWithHttpInfoAsync(
                appId,
                endpointId,
                endpointTransformationIn,
                cancellationToken);

            return response.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(TransformationPartialUpdateAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Sends an example message of a given event type to the endpoint.
    /// </summary>
    public MessageOut SendExample(string appId, string endpointId, EventExampleIn eventExampleIn, string idempotencyKey = default)
    {
        try
        {
            var response = _endpointApi.V1EndpointSendExample(
                appId,
                endpointId,
                eventExampleIn,
                idempotencyKey);

            return response;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(SendExample)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="SendExample"/>.
    /// </summary>
    public async Task<MessageOut> SendExampleAsync(string appId, string endpointId, EventExampleIn eventExampleIn, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            // Forward the caller's token (was previously dropped).
            var response = await _endpointApi.V1EndpointSendExampleAsync(
                appId,
                endpointId,
                eventExampleIn,
                idempotencyKey,
                cancellationToken);

            return response;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(SendExampleAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }
}
}

View File

@@ -0,0 +1,332 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
namespace Svix
{
/// <summary>
/// Wraps the generated event-type API (archive, CRUD, list, OpenAPI import).
/// Failures are logged and either rethrown (when Throw is set) or mapped to
/// null / false / an empty list response.
/// </summary>
public sealed class EventType : SvixResourceBase, IEventType
{
    private readonly IEventTypeApi _eventTypeApi;

    public EventType(ISvixClient svixClient, IEventTypeApi eventTypeApi)
        : base(svixClient)
    {
        _eventTypeApi = eventTypeApi ?? throw new ArgumentNullException(nameof(eventTypeApi));
    }

    /// <summary>
    /// Archives (soft-deletes) an event type; <paramref name="expunge"/> requests hard deletion.
    /// True when the API answers 204 No Content.
    /// </summary>
    public bool Archive(string eventType, bool? expunge = null, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _eventTypeApi.V1EventTypeDeleteWithHttpInfo(
                eventType,
                expunge);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Archive)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Archive"/>.
    /// </summary>
    public async Task<bool> ArchiveAsync(string eventType, bool? expunge = null, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _eventTypeApi.V1EventTypeDeleteWithHttpInfoAsync(
                eventType,
                expunge,
                cancellationToken);

            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ArchiveAsync)} failed");

            if (Throw)
                throw;

            return false;
        }
    }

    /// <summary>
    /// Creates a new event type. Returns null on failure when Throw is disabled.
    /// </summary>
    public EventTypeOut Create(EventTypeIn eventType, string idempotencyKey = default)
    {
        try
        {
            var lEventType = _eventTypeApi.V1EventTypeCreate(
                eventType,
                idempotencyKey);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Create)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Create"/>.
    /// </summary>
    public async Task<EventTypeOut> CreateAsync(EventTypeIn eventType, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lEventType = await _eventTypeApi.V1EventTypeCreateAsync(
                eventType,
                idempotencyKey,
                cancellationToken);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(CreateAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Gets a single event type by name.
    /// </summary>
    public EventTypeOut Get(string eventType, string idempotencyKey = default)
    {
        try
        {
            var lEventType = _eventTypeApi.V1EventTypeGet(eventType);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Get)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Get"/>.
    /// </summary>
    public async Task<EventTypeOut> GetAsync(string eventType, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lEventType = await _eventTypeApi.V1EventTypeGetAsync(
                eventType,
                cancellationToken);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Lists event types. Returns an empty list response on failure when Throw is disabled.
    /// </summary>
    public ListResponseEventTypeOut List(EventTypeListOptions options = null, string idempotencyKey = default)
    {
        try
        {
            var lResults = _eventTypeApi.V1EventTypeList(
                options?.Limit,
                options?.Iterator,
                null,
                options?.IncludeArchived,
                options?.WithContent);

            return lResults;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(List)} failed");

            if (Throw)
                throw;

            return new ListResponseEventTypeOut();
        }
    }

    /// <summary>
    /// Async variant of <see cref="List"/>.
    /// </summary>
    public async Task<ListResponseEventTypeOut> ListAsync(EventTypeListOptions options = null, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResults = await _eventTypeApi.V1EventTypeListAsync(
                options?.Limit,
                options?.Iterator,
                null,
                options?.IncludeArchived,
                options?.WithContent,
                cancellationToken);

            return lResults;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ListAsync)} failed");

            if (Throw)
                throw;

            return new ListResponseEventTypeOut();
        }
    }

    /// <summary>
    /// Fully updates an event type.
    /// </summary>
    public EventTypeOut Update(string eventType, EventTypeUpdate update, string idempotencyKey = default)
    {
        try
        {
            var lEventType = _eventTypeApi.V1EventTypeUpdate(
                eventType,
                update);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Update)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Update"/>.
    /// </summary>
    public async Task<EventTypeOut> UpdateAsync(string eventType, EventTypeUpdate update, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lEventType = await _eventTypeApi.V1EventTypeUpdateAsync(
                eventType,
                update,
                cancellationToken);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(UpdateAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Partially updates an event type; null fields in <paramref name="update"/> are ignored.
    /// </summary>
    public EventTypeOut Patch(string eventType, EventTypePatch update, string idempotencyKey = default)
    {
        try
        {
            var lEventType = _eventTypeApi.V1EventTypePatch(
                eventType,
                update);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Patch)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="Patch"/>.
    /// </summary>
    public async Task<EventTypeOut> PatchAsync(string eventType, EventTypePatch update, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lEventType = await _eventTypeApi.V1EventTypePatchAsync(
                eventType,
                update,
                cancellationToken);

            return lEventType;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(PatchAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Creates event types in bulk from an OpenAPI specification.
    /// </summary>
    public EventTypeImportOpenApiOut ImportOpenApi(EventTypeImportOpenApiIn eventTypeImportOpenApiIn, string idempotencyKey = default)
    {
        try
        {
            var lEventTypeImportOpenApiOut = _eventTypeApi.V1EventTypeImportOpenapi(
                eventTypeImportOpenApiIn,
                idempotencyKey);

            return lEventTypeImportOpenApiOut;
        }
        catch (ApiException e)
        {
            // Fixed: previously logged nameof(Create) (copy-paste).
            Logger?.LogError(e, $"{nameof(ImportOpenApi)} failed");

            if (Throw)
                throw;

            return null;
        }
    }

    /// <summary>
    /// Async variant of <see cref="ImportOpenApi"/>.
    /// </summary>
    public async Task<EventTypeImportOpenApiOut> ImportOpenApiAsync(EventTypeImportOpenApiIn eventTypeImportOpenApiIn, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lEventTypeImportOpenApiOut = await _eventTypeApi.V1EventTypeImportOpenapiAsync(
                eventTypeImportOpenApiIn,
                idempotencyKey,
                cancellationToken);

            return lEventTypeImportOpenApiOut;
        }
        catch (ApiException e)
        {
            // Fixed: previously logged nameof(CreateAsync) (copy-paste).
            Logger?.LogError(e, $"{nameof(ImportOpenApiAsync)} failed");

            if (Throw)
                throw;

            return null;
        }
    }
}
}

View File

@@ -0,0 +1,16 @@
using System;
namespace Svix.Exceptions
{
/// <summary>
/// Thrown when an incoming webhook fails signature/timestamp verification.
/// Provides the standard four exception constructors so it serializes and
/// wraps inner exceptions like any framework exception type.
/// </summary>
[Serializable]
public class WebhookVerificationException : Exception
{
    public WebhookVerificationException() : base() { }
    public WebhookVerificationException(string message) : base(message) { }
    public WebhookVerificationException(string message, Exception inner) : base(message, inner) { }
    // Deserialization constructor (required for [Serializable] support).
    protected WebhookVerificationException(System.Runtime.Serialization.SerializationInfo info,
        System.Runtime.Serialization.StreamingContext context) : base(info, context) { }
}
}

View File

@@ -0,0 +1,11 @@
<?php
// Directory-listing guard: this stub is dropped into module folders so a
// direct request never exposes the folder's contents.
// Mark the response as long-expired and uncacheable in every cache layer.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
// HTTP/1.0 proxy directives; second call appends rather than replaces.
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
// Bounce the visitor up one level and stop executing.
header('Location: ../');
exit;

View File

@@ -0,0 +1,203 @@
/*
* Svix API
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.1.1
* Generated by: https://github.com/openapitools/openapi-generator.git
*
* Manually modified to add `EmitDefaultValue = false` in all properties. Null fields are ignored for PATCH requests.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.IO;
using System.Runtime.Serialization;
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Linq;
using System.ComponentModel.DataAnnotations;
using FileParameter = Svix.Client.FileParameter;
using OpenAPIDateConverter = Svix.Client.OpenAPIDateConverter;
namespace Svix.Model
{
/// <summary>
/// ApplicationPatch
/// </summary>
/// <summary>
/// Partial-update payload for an application. All properties use
/// EmitDefaultValue = false so null fields are omitted from PATCH requests.
/// </summary>
[DataContract(Name = "ApplicationPatch")]
public partial class ApplicationPatch : IEquatable<ApplicationPatch>, IValidatableObject
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ApplicationPatch" /> class.
    /// </summary>
    /// <param name="metadata">metadata.</param>
    /// <param name="name">name.</param>
    /// <param name="rateLimit">rateLimit.</param>
    /// <param name="uid">The app&#39;s UID.</param>
    public ApplicationPatch(Dictionary<string, string> metadata = default(Dictionary<string, string>), string name = default(string), int? rateLimit = default(int?), string uid = default(string))
    {
        this.Metadata = metadata;
        this.Name = name;
        this.RateLimit = rateLimit;
        this.Uid = uid;
    }

    /// <summary>
    /// Gets or Sets Metadata
    /// </summary>
    [DataMember(Name = "metadata", EmitDefaultValue = false)]
    public Dictionary<string, string> Metadata { get; set; }

    /// <summary>
    /// Gets or Sets Name
    /// </summary>
    [DataMember(Name = "name", EmitDefaultValue = false)]
    public string Name { get; set; }

    /// <summary>
    /// Gets or Sets RateLimit
    /// </summary>
    [DataMember(Name = "rateLimit", EmitDefaultValue = false)]
    public int? RateLimit { get; set; }

    /// <summary>
    /// The app&#39;s UID
    /// </summary>
    /// <value>The app&#39;s UID</value>
    [DataMember(Name = "uid", EmitDefaultValue = false)]
    public string Uid { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class ApplicationPatch {\n");
        sb.Append("  Metadata: ").Append(Metadata).Append("\n");
        sb.Append("  Name: ").Append(Name).Append("\n");
        sb.Append("  RateLimit: ").Append(RateLimit).Append("\n");
        sb.Append("  Uid: ").Append(Uid).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public virtual string ToJson()
    {
        return Newtonsoft.Json.JsonConvert.SerializeObject(this, Newtonsoft.Json.Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="input">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object input)
    {
        return this.Equals(input as ApplicationPatch);
    }

    /// <summary>
    /// Returns true if ApplicationPatch instances are equal
    /// </summary>
    /// <param name="input">Instance of ApplicationPatch to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(ApplicationPatch input)
    {
        if (input == null)
            return false;

        return
            (
                this.Metadata == input.Metadata ||
                this.Metadata != null &&
                input.Metadata != null &&
                this.Metadata.SequenceEqual(input.Metadata)
            ) &&
            (
                this.Name == input.Name ||
                (this.Name != null &&
                this.Name.Equals(input.Name))
            ) &&
            (
                this.RateLimit == input.RateLimit ||
                (this.RateLimit != null &&
                this.RateLimit.Equals(input.RateLimit))
            ) &&
            (
                this.Uid == input.Uid ||
                (this.Uid != null &&
                this.Uid.Equals(input.Uid))
            );
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        unchecked // Overflow is fine, just wrap
        {
            int hashCode = 41;
            if (this.Metadata != null)
                hashCode = hashCode * 59 + this.Metadata.GetHashCode();
            if (this.Name != null)
                hashCode = hashCode * 59 + this.Name.GetHashCode();
            if (this.RateLimit != null)
                hashCode = hashCode * 59 + this.RateLimit.GetHashCode();
            if (this.Uid != null)
                hashCode = hashCode * 59 + this.Uid.GetHashCode();
            return hashCode;
        }
    }

    /// <summary>
    /// To validate all properties of the instance
    /// </summary>
    /// <param name="validationContext">Validation context</param>
    /// <returns>Validation Result</returns>
    IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
    {
        // RateLimit (int?) minimum
        if (this.RateLimit < (int?)0)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for RateLimit, must be a value greater than or equal to 0.", new[] { "RateLimit" });
        }

        // Uid (string) maxLength
        if (this.Uid != null && this.Uid.Length > 256)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Uid, length must be less than 256.", new[] { "Uid" });
        }

        // Uid (string) minLength
        if (this.Uid != null && this.Uid.Length < 1)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Uid, length must be greater than 1.", new[] { "Uid" });
        }

        // Uid (string) pattern
        // Fixed: guard against null before Regex.Match — Uid is optional in a PATCH
        // payload and the unguarded call threw a NullReferenceException.
        if (this.Uid != null)
        {
            Regex regexUid = new Regex(@"^[a-zA-Z0-9\\-_.]+$", RegexOptions.CultureInvariant);
            if (false == regexUid.Match(this.Uid).Success)
            {
                yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Uid, must match a pattern of " + regexUid, new[] { "Uid" });
            }
        }

        yield break;
    }
}
}

View File

@@ -0,0 +1,311 @@
/*
* Svix API
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.1.1
* Generated by: https://github.com/openapitools/openapi-generator.git
*
* Manually modified to add `EmitDefaultValue = false` in all properties. Null fields are ignored for PATCH requests.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.IO;
using System.Runtime.Serialization;
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Linq;
using System.ComponentModel.DataAnnotations;
using FileParameter = Svix.Client.FileParameter;
using OpenAPIDateConverter = Svix.Client.OpenAPIDateConverter;
namespace Svix.Model
{
/// <summary>
/// EndpointPatch — PATCH payload for an endpoint; null fields are omitted from the
/// serialized request (EmitDefaultValue = false) so they are left unchanged server-side.
/// </summary>
[DataContract(Name = "EndpointPatch")]
public partial class EndpointPatch : IEquatable<EndpointPatch>, IValidatableObject
{
    /// <summary>
    /// Initializes a new instance of the <see cref="EndpointPatch" /> class.
    /// </summary>
    /// <param name="channels">channels.</param>
    /// <param name="description">description.</param>
    /// <param name="disabled">disabled.</param>
    /// <param name="filterTypes">filterTypes.</param>
    /// <param name="metadata">metadata.</param>
    /// <param name="rateLimit">rateLimit.</param>
    /// <param name="secret">The endpoint&#39;s verification secret. If &#x60;null&#x60; is passed, a secret is automatically generated. Format: &#x60;base64&#x60; encoded random bytes optionally prefixed with &#x60;whsec_&#x60;. Recommended size: 24..</param>
    /// <param name="uid">The endpoint&#39;s UID.</param>
    /// <param name="url">url.</param>
    /// <param name="version">version.</param>
    public EndpointPatch(List<string> channels = default(List<string>), string description = default(string), bool disabled = default(bool), List<string> filterTypes = default(List<string>), Dictionary<string, string> metadata = default(Dictionary<string, string>), int? rateLimit = default(int?), string secret = default(string), string uid = default(string), string url = default(string), int version = default(int))
    {
        this.Channels = channels;
        this.Description = description;
        this.Disabled = disabled;
        this.FilterTypes = filterTypes;
        this.Metadata = metadata;
        this.RateLimit = rateLimit;
        this.Secret = secret;
        this.Uid = uid;
        this.Url = url;
        this.Version = version;
    }
    /// <summary>
    /// Gets or Sets Channels
    /// </summary>
    [DataMember(Name = "channels", EmitDefaultValue = false)]
    public List<string> Channels { get; set; }
    /// <summary>
    /// Gets or Sets Description
    /// </summary>
    [DataMember(Name = "description", EmitDefaultValue = false)]
    public string Description { get; set; }
    /// <summary>
    /// Gets or Sets Disabled
    /// </summary>
    [DataMember(Name = "disabled", EmitDefaultValue = false)]
    public bool Disabled { get; set; }
    /// <summary>
    /// Gets or Sets FilterTypes
    /// </summary>
    [DataMember(Name = "filterTypes", EmitDefaultValue = false)]
    public List<string> FilterTypes { get; set; }
    /// <summary>
    /// Gets or Sets Metadata
    /// </summary>
    [DataMember(Name = "metadata", EmitDefaultValue = false)]
    public Dictionary<string, string> Metadata { get; set; }
    /// <summary>
    /// Gets or Sets RateLimit
    /// </summary>
    [DataMember(Name = "rateLimit", EmitDefaultValue = false)]
    public int? RateLimit { get; set; }
    /// <summary>
    /// The endpoint&#39;s verification secret. If &#x60;null&#x60; is passed, a secret is automatically generated. Format: &#x60;base64&#x60; encoded random bytes optionally prefixed with &#x60;whsec_&#x60;. Recommended size: 24.
    /// </summary>
    /// <value>The endpoint&#39;s verification secret. If &#x60;null&#x60; is passed, a secret is automatically generated. Format: &#x60;base64&#x60; encoded random bytes optionally prefixed with &#x60;whsec_&#x60;. Recommended size: 24.</value>
    [DataMember(Name = "secret", EmitDefaultValue = false)]
    public string Secret { get; set; }
    /// <summary>
    /// The endpoint&#39;s UID
    /// </summary>
    /// <value>The endpoint&#39;s UID</value>
    [DataMember(Name = "uid", EmitDefaultValue = false)]
    public string Uid { get; set; }
    /// <summary>
    /// Gets or Sets Url
    /// </summary>
    [DataMember(Name = "url", EmitDefaultValue = false)]
    public string Url { get; set; }
    /// <summary>
    /// Gets or Sets Version
    /// </summary>
    [DataMember(Name = "version", EmitDefaultValue = false)]
    public int Version { get; set; }
    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class EndpointPatch {\n");
        sb.Append("  Channels: ").Append(Channels).Append("\n");
        sb.Append("  Description: ").Append(Description).Append("\n");
        sb.Append("  Disabled: ").Append(Disabled).Append("\n");
        sb.Append("  FilterTypes: ").Append(FilterTypes).Append("\n");
        sb.Append("  Metadata: ").Append(Metadata).Append("\n");
        sb.Append("  RateLimit: ").Append(RateLimit).Append("\n");
        sb.Append("  Secret: ").Append(Secret).Append("\n");
        sb.Append("  Uid: ").Append(Uid).Append("\n");
        sb.Append("  Url: ").Append(Url).Append("\n");
        sb.Append("  Version: ").Append(Version).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }
    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public virtual string ToJson()
    {
        return Newtonsoft.Json.JsonConvert.SerializeObject(this, Newtonsoft.Json.Formatting.Indented);
    }
    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="input">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object input)
    {
        return this.Equals(input as EndpointPatch);
    }
    /// <summary>
    /// Returns true if EndpointPatch instances are equal
    /// </summary>
    /// <param name="input">Instance of EndpointPatch to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(EndpointPatch input)
    {
        if (input == null)
            return false;
        return
            (
                this.Channels == input.Channels ||
                this.Channels != null &&
                input.Channels != null &&
                this.Channels.SequenceEqual(input.Channels)
            ) &&
            (
                this.Description == input.Description ||
                (this.Description != null &&
                this.Description.Equals(input.Description))
            ) &&
            (
                this.Disabled == input.Disabled ||
                this.Disabled.Equals(input.Disabled)
            ) &&
            (
                this.FilterTypes == input.FilterTypes ||
                this.FilterTypes != null &&
                input.FilterTypes != null &&
                this.FilterTypes.SequenceEqual(input.FilterTypes)
            ) &&
            (
                this.Metadata == input.Metadata ||
                this.Metadata != null &&
                input.Metadata != null &&
                this.Metadata.SequenceEqual(input.Metadata)
            ) &&
            (
                this.RateLimit == input.RateLimit ||
                (this.RateLimit != null &&
                this.RateLimit.Equals(input.RateLimit))
            ) &&
            (
                this.Secret == input.Secret ||
                (this.Secret != null &&
                this.Secret.Equals(input.Secret))
            ) &&
            (
                this.Uid == input.Uid ||
                (this.Uid != null &&
                this.Uid.Equals(input.Uid))
            ) &&
            (
                this.Url == input.Url ||
                (this.Url != null &&
                this.Url.Equals(input.Url))
            ) &&
            (
                this.Version == input.Version ||
                this.Version.Equals(input.Version)
            );
    }
    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        unchecked // Overflow is fine, just wrap
        {
            int hashCode = 41;
            if (this.Channels != null)
                hashCode = hashCode * 59 + this.Channels.GetHashCode();
            if (this.Description != null)
                hashCode = hashCode * 59 + this.Description.GetHashCode();
            hashCode = hashCode * 59 + this.Disabled.GetHashCode();
            if (this.FilterTypes != null)
                hashCode = hashCode * 59 + this.FilterTypes.GetHashCode();
            if (this.Metadata != null)
                hashCode = hashCode * 59 + this.Metadata.GetHashCode();
            if (this.RateLimit != null)
                hashCode = hashCode * 59 + this.RateLimit.GetHashCode();
            if (this.Secret != null)
                hashCode = hashCode * 59 + this.Secret.GetHashCode();
            if (this.Uid != null)
                hashCode = hashCode * 59 + this.Uid.GetHashCode();
            if (this.Url != null)
                hashCode = hashCode * 59 + this.Url.GetHashCode();
            hashCode = hashCode * 59 + this.Version.GetHashCode();
            return hashCode;
        }
    }
    /// <summary>
    /// To validate all properties of the instance.
    /// In a PATCH model every field may legitimately be null (meaning "leave unchanged"),
    /// so all string validations must be null-guarded.
    /// </summary>
    /// <param name="validationContext">Validation context</param>
    /// <returns>Validation Result</returns>
    IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
    {
        // RateLimit (int?) minimum — lifted comparison: null compares false, so null is skipped.
        if (this.RateLimit < (int?)0)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for RateLimit, must be a value greater than or equal to 0.", new[] { "RateLimit" });
        }
        // Secret (string) pattern — guard against null: Regex.Match throws
        // ArgumentNullException on a null input, and Secret is optional here.
        Regex regexSecret = new Regex(@"^(whsec_)?[a-zA-Z0-9+\/=]{32,100}$", RegexOptions.CultureInvariant);
        if (this.Secret != null && false == regexSecret.Match(this.Secret).Success)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Secret, must match a pattern of " + regexSecret, new[] { "Secret" });
        }
        // Uid (string) maxLength
        if (this.Uid != null && this.Uid.Length > 256)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Uid, length must be less than 256.", new[] { "Uid" });
        }
        // Uid (string) minLength
        if (this.Uid != null && this.Uid.Length < 1)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Uid, length must be greater than 1.", new[] { "Uid" });
        }
        // Uid (string) pattern — null-guarded for the same reason as Secret.
        Regex regexUid = new Regex(@"^[a-zA-Z0-9\\-_.]+$", RegexOptions.CultureInvariant);
        if (this.Uid != null && false == regexUid.Match(this.Uid).Success)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Uid, must match a pattern of " + regexUid, new[] { "Uid" });
        }
        // Version (int) minimum
        if (this.Version < (int)1)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for Version, must be a value greater than or equal to 1.", new[] { "Version" });
        }
        yield break;
    }
}
}

View File

@@ -0,0 +1,188 @@
/*
* Svix API
*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 1.1.1
* Generated by: https://github.com/openapitools/openapi-generator.git
*
* Manually modified to add `EmitDefaultValue = false` in all properties. Null fields are ignored for PATCH requests.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.IO;
using System.Runtime.Serialization;
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Linq;
using System.ComponentModel.DataAnnotations;
using FileParameter = Svix.Client.FileParameter;
using OpenAPIDateConverter = Svix.Client.OpenAPIDateConverter;
namespace Svix.Model
{
/// <summary>
/// EventTypePatch — PATCH payload for an event type; null fields are omitted from the
/// serialized request (EmitDefaultValue = false) so they are left unchanged server-side.
/// </summary>
[DataContract(Name = "EventTypePatch")]
public partial class EventTypePatch : IEquatable<EventTypePatch>, IValidatableObject
{
    /// <summary>
    /// Initializes a new instance of the <see cref="EventTypePatch" /> class.
    /// </summary>
    /// <param name="archived">archived.</param>
    /// <param name="description">description.</param>
    /// <param name="featureFlag">featureFlag.</param>
    /// <param name="schemas">schemas.</param>
    public EventTypePatch(bool archived = default(bool), string description = default(string), string featureFlag = default(string), Dictionary<string, Object> schemas = default(Dictionary<string, Object>))
    {
        this.Archived = archived;
        this.Description = description;
        this.FeatureFlag = featureFlag;
        this.Schemas = schemas;
    }
    /// <summary>
    /// Gets or Sets Archived
    /// </summary>
    [DataMember(Name = "archived", EmitDefaultValue = false)]
    public bool Archived { get; set; }
    /// <summary>
    /// Gets or Sets Description
    /// </summary>
    [DataMember(Name = "description", EmitDefaultValue = false)]
    public string Description { get; set; }
    /// <summary>
    /// Gets or Sets FeatureFlag
    /// </summary>
    [DataMember(Name = "featureFlag", EmitDefaultValue = false)]
    public string FeatureFlag { get; set; }
    /// <summary>
    /// Gets or Sets Schemas
    /// </summary>
    [DataMember(Name = "schemas", EmitDefaultValue = false)]
    public Dictionary<string, Object> Schemas { get; set; }
    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class EventTypePatch {\n");
        sb.Append("  Archived: ").Append(Archived).Append("\n");
        sb.Append("  Description: ").Append(Description).Append("\n");
        sb.Append("  FeatureFlag: ").Append(FeatureFlag).Append("\n");
        sb.Append("  Schemas: ").Append(Schemas).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }
    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public virtual string ToJson()
    {
        return Newtonsoft.Json.JsonConvert.SerializeObject(this, Newtonsoft.Json.Formatting.Indented);
    }
    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="input">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object input)
    {
        return this.Equals(input as EventTypePatch);
    }
    /// <summary>
    /// Returns true if EventTypePatch instances are equal
    /// </summary>
    /// <param name="input">Instance of EventTypePatch to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(EventTypePatch input)
    {
        if (input == null)
            return false;
        return
            (
                this.Archived == input.Archived ||
                this.Archived.Equals(input.Archived)
            ) &&
            (
                this.Description == input.Description ||
                (this.Description != null &&
                this.Description.Equals(input.Description))
            ) &&
            (
                this.FeatureFlag == input.FeatureFlag ||
                (this.FeatureFlag != null &&
                this.FeatureFlag.Equals(input.FeatureFlag))
            ) &&
            (
                this.Schemas == input.Schemas ||
                this.Schemas != null &&
                input.Schemas != null &&
                this.Schemas.SequenceEqual(input.Schemas)
            );
    }
    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        unchecked // Overflow is fine, just wrap
        {
            int hashCode = 41;
            hashCode = hashCode * 59 + this.Archived.GetHashCode();
            if (this.Description != null)
                hashCode = hashCode * 59 + this.Description.GetHashCode();
            if (this.FeatureFlag != null)
                hashCode = hashCode * 59 + this.FeatureFlag.GetHashCode();
            if (this.Schemas != null)
                hashCode = hashCode * 59 + this.Schemas.GetHashCode();
            return hashCode;
        }
    }
    /// <summary>
    /// To validate all properties of the instance.
    /// In a PATCH model every field may legitimately be null (meaning "leave unchanged"),
    /// so all string validations must be null-guarded.
    /// </summary>
    /// <param name="validationContext">Validation context</param>
    /// <returns>Validation Result</returns>
    IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
    {
        // FeatureFlag (string) maxLength
        if (this.FeatureFlag != null && this.FeatureFlag.Length > 256)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for FeatureFlag, length must be less than 256.", new[] { "FeatureFlag" });
        }
        // FeatureFlag (string) pattern — guard against null: Regex.Match throws
        // ArgumentNullException on a null input, and the maxLength check above
        // already treats FeatureFlag as optional.
        Regex regexFeatureFlag = new Regex(@"^[a-zA-Z0-9\\-_.]+$", RegexOptions.CultureInvariant);
        if (this.FeatureFlag != null && false == regexFeatureFlag.Match(this.FeatureFlag).Success)
        {
            yield return new System.ComponentModel.DataAnnotations.ValidationResult("Invalid value for FeatureFlag, must match a pattern of " + regexFeatureFlag, new[] { "FeatureFlag" });
        }
        yield break;
    }
}
}

View File

@@ -0,0 +1,11 @@
<?php
// Placeholder index file: blocks direct browsing of this directory.
// Sends no-cache headers (old-style Expires/Pragma plus both Cache-Control
// forms for legacy HTTP/1.0 clients) and redirects the visitor one level up.
// Presumably the standard PrestaShop anti-directory-listing stub — confirm
// against the module's other index.php files.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,11 @@
<?php
// Placeholder index file: blocks direct browsing of this directory.
// Sends no-cache headers (old-style Expires/Pragma plus both Cache-Control
// forms for legacy HTTP/1.0 clients) and redirects the visitor one level up.
// Presumably the standard PrestaShop anti-directory-listing stub — confirm
// against the module's other index.php files.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,11 @@
<?php
// Placeholder index file: blocks direct browsing of this directory.
// Sends no-cache headers (old-style Expires/Pragma plus both Cache-Control
// forms for legacy HTTP/1.0 clients) and redirects the visitor one level up.
// Presumably the standard PrestaShop anti-directory-listing stub — confirm
// against the module's other index.php files.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,11 @@
<?php
// Placeholder index file: blocks direct browsing of this directory.
// Sends no-cache headers (old-style Expires/Pragma plus both Cache-Control
// forms for legacy HTTP/1.0 clients) and redirects the visitor one level up.
// Presumably the standard PrestaShop anti-directory-listing stub — confirm
// against the module's other index.php files.
header('Expires: Mon, 26 Jul 1997 05:00:00 GMT');
header('Last-Modified: ' . gmdate('D, d M Y H:i:s') . ' GMT');
header('Cache-Control: no-store, no-cache, must-revalidate');
header('Cache-Control: post-check=0, pre-check=0', false);
header('Pragma: no-cache');
header('Location: ../');
exit;

View File

@@ -0,0 +1,61 @@
using System;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
namespace Svix
{
/// <summary>
/// Thin wrapper over the generated health API: reports whether the Svix server
/// answered the health endpoint with 204 No Content.
/// </summary>
public sealed class Health : SvixResourceBase, IHealth
{
    private readonly IHealthApi _healthApi;

    /// <summary>
    /// Wires the wrapper to a concrete health API implementation.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="healthApi"/> is null.</exception>
    public Health(ISvixClient svixClient, IHealthApi healthApi)
        : base(svixClient)
    {
        _healthApi = healthApi ?? throw new ArgumentNullException(nameof(healthApi));
    }

    /// <summary>
    /// Synchronously checks server health. On an API error the exception is logged,
    /// rethrown when <c>Throw</c> is set, and otherwise reported as unhealthy.
    /// </summary>
    /// <param name="idempotencyKey">Accepted for interface symmetry; not forwarded to the API call.</param>
    public bool IsHealthy(string idempotencyKey = default)
    {
        try
        {
            var apiResponse = _healthApi.V1HealthGetWithHttpInfo();
            return HttpStatusCode.NoContent == apiResponse.StatusCode;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(IsHealthy)} failed");
            if (Throw)
                throw;
            return false;
        }
    }

    /// <summary>
    /// Asynchronously checks server health. Error handling mirrors <see cref="IsHealthy"/>.
    /// </summary>
    /// <param name="idempotencyKey">Accepted for interface symmetry; not forwarded to the API call.</param>
    /// <param name="cancellationToken">Propagated to the underlying HTTP call.</param>
    public async Task<bool> IsHealthyAsync(string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var apiResponse = await _healthApi.V1HealthGetWithHttpInfoAsync(cancellationToken)
                .ConfigureAwait(false);
            return HttpStatusCode.NoContent == apiResponse.StatusCode;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(IsHealthyAsync)} failed");
            if (Throw)
                throw;
            return false;
        }
    }
}
}

View File

@@ -0,0 +1,340 @@
using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
namespace Svix
{
/// <summary>
/// Wrapper over the generated integration API. Every method follows the same
/// contract: on ApiException the error is logged, rethrown when Throw is set,
/// and otherwise a fallback value (null / false / empty list) is returned.
/// </summary>
public sealed class Integration : SvixResourceBase, IIntegration
{
    private readonly IIntegrationApi _integrationApi;

    /// <summary>Wires the wrapper to a concrete integration API implementation.</summary>
    public Integration(ISvixClient svixClient, IIntegrationApi integrationApi) : base(svixClient)
    {
        _integrationApi = integrationApi ?? throw new ArgumentNullException(nameof(integrationApi));
    }

    /// <summary>Creates an integration on the application; null on handled failure.</summary>
    public IntegrationOut Create(string appId, IntegrationIn integration, string idempotencyKey = default)
    {
        try
        {
            var lIntegration = _integrationApi.V1IntegrationCreate(
                appId,
                integration,
                idempotencyKey);
            return lIntegration;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Create)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Async variant of <see cref="Create"/>.</summary>
    public async Task<IntegrationOut> CreateAsync(string appId, IntegrationIn integration, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lIntegration = await _integrationApi.V1IntegrationCreateAsync(
                appId,
                integration,
                idempotencyKey,
                cancellationToken);
            return lIntegration;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(CreateAsync)} failed");
            if (Throw)
                throw;
            return false is bool ? null : null;
        }
    }

    /// <summary>
    /// Deletes an integration; true when the API answers 204 No Content.
    /// NOTE(review): idempotencyKey is accepted but not forwarded to the API call — confirm intended.
    /// </summary>
    public bool Delete(string appId, string integrationId, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _integrationApi.V1IntegrationDeleteWithHttpInfo(
                appId,
                integrationId);
            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Delete)} failed");
            if (Throw)
                throw;
            return false;
        }
    }

    /// <summary>Async variant of <see cref="Delete"/> (idempotencyKey likewise unused).</summary>
    public async Task<bool> DeleteAsync(string appId, string integrationId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _integrationApi.V1IntegrationDeleteWithHttpInfoAsync(
                appId,
                integrationId,
                cancellationToken);
            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(DeleteAsync)} failed");
            if (Throw)
                throw;
            return false;
        }
    }

    /// <summary>
    /// Fetches a single integration; null on handled failure.
    /// NOTE(review): idempotencyKey is accepted but not forwarded to the API call — confirm intended.
    /// </summary>
    public IntegrationOut Get(string appId, string integrationId, string idempotencyKey = default)
    {
        try
        {
            var lIntegration = _integrationApi.V1IntegrationGet(
                appId,
                integrationId);
            return lIntegration;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Get)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Async variant of <see cref="Get"/>.</summary>
    public async Task<IntegrationOut> GetAsync(string appId, string integrationId, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lIntegration = await _integrationApi.V1IntegrationGetAsync(
                appId,
                integrationId,
                cancellationToken);
            return lIntegration;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAsync)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>
    /// Fetches the integration's current key and unwraps its Key value; null on handled failure.
    /// NOTE(review): idempotencyKey is accepted but not forwarded to the API call — confirm intended.
    /// </summary>
    public string GetKey(string appId, string integrationId, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _integrationApi.V1IntegrationGetKey(
                appId,
                integrationId);
            return lResponse.Key;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetKey)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Async variant of <see cref="GetKey"/>.</summary>
    public async Task<string> GetKeyAsync(string appId, string integrationId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _integrationApi.V1IntegrationGetKeyAsync(
                appId,
                integrationId,
                cancellationToken);
            return lResponse.Key;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetKeyAsync)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>
    /// Lists integrations with optional paging/ordering; empty list on handled failure.
    /// NOTE(review): idempotencyKey is accepted but not forwarded to the API call — confirm intended.
    /// </summary>
    public ListResponseIntegrationOut List(string appId, ListOptions options = null, string idempotencyKey = default)
    {
        try
        {
            var lResult = _integrationApi.V1IntegrationList(
                appId,
                options?.Limit,
                options?.Iterator,
                options?.Order);
            return lResult;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(List)} failed");
            if (Throw)
                throw;
            return new ListResponseIntegrationOut();
        }
    }

    /// <summary>Async variant of <see cref="List"/>.</summary>
    public async Task<ListResponseIntegrationOut> ListAsync(string appId, ListOptions options = null, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResult = await _integrationApi.V1IntegrationListAsync(
                appId,
                options?.Limit,
                options?.Iterator,
                options?.Order,
                cancellationToken);
            return lResult;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ListAsync)} failed");
            if (Throw)
                throw;
            return new ListResponseIntegrationOut();
        }
    }

    /// <summary>Rotates the integration key (idempotencyKey IS forwarded here); null on handled failure.</summary>
    public string RotateKey(string appId, string integrationId, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _integrationApi.V1IntegrationRotateKey(
                appId,
                integrationId,
                idempotencyKey);
            return lResponse.Key;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(RotateKey)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Async variant of <see cref="RotateKey"/>.</summary>
    public async Task<string> RotateKeyAsync(string appId, string integrationId, string idempotencyKey = default,
        CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _integrationApi.V1IntegrationRotateKeyAsync(
                appId,
                integrationId,
                idempotencyKey,
                cancellationToken);
            return lResponse.Key;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(RotateKeyAsync)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>
    /// Replaces an integration's mutable fields; null on handled failure.
    /// NOTE(review): idempotencyKey is accepted but not forwarded to the API call — confirm intended.
    /// </summary>
    public IntegrationOut Update(string appId, string integrationId, IntegrationUpdate integration, string idempotencyKey = default)
    {
        try
        {
            var lIntegration = _integrationApi.V1IntegrationUpdate(
                appId,
                integrationId,
                integration);
            return lIntegration;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Update)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Async variant of <see cref="Update"/>.</summary>
    public async Task<IntegrationOut> UpdateAsync(string appId, string integrationId, IntegrationUpdate integration, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lIntegration = await _integrationApi.V1IntegrationUpdateAsync(
                appId,
                integrationId,
                integration,
                cancellationToken);
            return lIntegration;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(UpdateAsync)} failed");
            if (Throw)
                throw;
            return null;
        }
    }
}
}

View File

@@ -0,0 +1,220 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
namespace Svix
{
/// <summary>
/// Wrapper over the generated message API. Every method follows the same contract:
/// on ApiException the error is logged, rethrown when Throw is set, and otherwise
/// a fallback value (null / false / empty list) is returned.
/// </summary>
public sealed class Message : SvixResourceBase, IMessage
{
    private readonly IMessageApi _messageApi;

    /// <summary>Wires the wrapper to a concrete message API implementation.</summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="messageApi"/> is null.</exception>
    public Message(ISvixClient svixClient, IMessageApi messageApi)
        : base(svixClient)
    {
        // Fixed: was ArgumentException(nameof(messageApi)), which used the wrong
        // exception type and put the parameter name in the message slot. Every
        // sibling resource class (Health, Integration, MessageAttempt) throws
        // ArgumentNullException here.
        _messageApi = messageApi ?? throw new ArgumentNullException(nameof(messageApi));
    }

    /// <summary>Sends a new message to the application; null on handled failure.</summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="message"/> is null.</exception>
    public MessageOut Create(string appId, MessageIn message, MessageCreateOptions options = null, string idempotencyKey = default)
    {
        try
        {
            message = message ?? throw new ArgumentNullException(nameof(message));
            var lApplication = _messageApi.V1MessageCreate(
                appId,
                message,
                options?.WithContent,
                idempotencyKey);
            return lApplication;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Create)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Async variant of <see cref="Create"/>.</summary>
    public async Task<MessageOut> CreateAsync(string appId, MessageIn message, MessageCreateOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            message = message ?? throw new ArgumentNullException(nameof(message));
            var lApplication = await _messageApi.V1MessageCreateAsync(
                appId,
                message,
                options?.WithContent,
                idempotencyKey,
                cancellationToken);
            return lApplication;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(CreateAsync)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>
    /// Fetches a single message; null on handled failure.
    /// NOTE(review): idempotencyKey is accepted but not forwarded to the API call — confirm intended.
    /// </summary>
    public MessageOut Get(string appId, string messageId, string idempotencyKey = default)
    {
        try
        {
            var lMessage = _messageApi.V1MessageGet(
                appId,
                messageId);
            return lMessage;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(Get)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Async variant of <see cref="Get"/>.</summary>
    public async Task<MessageOut> GetAsync(string appId, string messageId, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            // Fixed: cancellationToken was accepted but never forwarded, so the
            // call could not be cancelled; every other async wrapper forwards it.
            var lMessage = await _messageApi.V1MessageGetAsync(
                appId,
                messageId,
                cancellationToken);
            return lMessage;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(GetAsync)} failed");
            if (Throw)
                throw;
            return null;
        }
    }

    /// <summary>Lists messages with optional filters; empty list on handled failure.</summary>
    public ListResponseMessageOut List(string appId, MessageListOptions options = null, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _messageApi.V1MessageList(
                appId,
                options?.Limit,
                options?.Iterator,
                options?.Channel,
                options?.Before,
                options?.After,
                options?.WithContent,
                options?.Tag,
                options?.EventTypes
            );
            return lResponse;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(List)} failed");
            if (Throw)
                throw;
            return new ListResponseMessageOut();
        }
    }

    /// <summary>Async variant of <see cref="List"/>.</summary>
    public async Task<ListResponseMessageOut> ListAsync(string appId, MessageListOptions options = null, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _messageApi.V1MessageListAsync(
                appId,
                options?.Limit,
                options?.Iterator,
                options?.Channel,
                options?.Before,
                options?.After,
                options?.WithContent,
                options?.Tag,
                options?.EventTypes,
                cancellationToken);
            return lResponse;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ListAsync)} failed");
            if (Throw)
                throw;
            return new ListResponseMessageOut();
        }
    }

    /// <summary>Deletes a message's payload content; true when the API answers 204 No Content.</summary>
    public bool ExpungeContent(string appId, string messageId, string idempotencyKey = default)
    {
        try
        {
            var lResponse = _messageApi.V1MessageExpungeContentWithHttpInfo(
                appId,
                messageId);
            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ExpungeContent)} failed");
            if (Throw)
                throw;
            return false;
        }
    }

    /// <summary>Async variant of <see cref="ExpungeContent"/>.</summary>
    public async Task<bool> ExpungeContentAsync(string appId, string messageId, string idempotencyKey = default, CancellationToken cancellationToken = default)
    {
        try
        {
            var lResponse = await _messageApi.V1MessageExpungeContentWithHttpInfoAsync(
                appId,
                messageId,
                cancellationToken);
            return lResponse.StatusCode == HttpStatusCode.NoContent;
        }
        catch (ApiException e)
        {
            Logger?.LogError(e, $"{nameof(ExpungeContentAsync)} failed");
            if (Throw)
                throw;
            return false;
        }
    }
}
}

View File

@@ -0,0 +1,543 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Svix.Abstractions;
using Svix.Api;
using Svix.Client;
using Svix.Model;
using Svix.Models;
namespace Svix
{
public sealed class MessageAttempt : SvixResourceBase, IMessageAttempt
{
private readonly IMessageAttemptApi _messageAttemptApi;
public MessageAttempt(ISvixClient svixClient, IMessageAttemptApi messageAttemptApi)
: base(svixClient)
{
_messageAttemptApi = messageAttemptApi ?? throw new ArgumentNullException(nameof(messageAttemptApi));
}
public MessageAttemptOut GetAttempt(string appId, string attemptId, string messageId, string idempotencyKey = default)
{
try
{
var lAttempt = _messageAttemptApi.V1MessageAttemptGet(
appId,
messageId,
attemptId);
return lAttempt;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(GetAttempt)} failed");
if (Throw)
throw;
return null;
}
}
public async Task<MessageAttemptOut> GetAttemptAsync(string appId, string attemptId, string messageId, string idempotencyKey = default,
CancellationToken cancellationToken = default)
{
try
{
var lAttempt = await _messageAttemptApi.V1MessageAttemptGetAsync(
appId,
messageId,
attemptId,
cancellationToken);
return lAttempt;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(GetAttemptAsync)} failed");
if (Throw)
throw;
return null;
}
}
public ListResponseEndpointMessageOut ListAttemptedMessages(string appId, string endpointId, MessageAttemptListOptions options = null,
string idempotencyKey = default)
{
try
{
var lResults = _messageAttemptApi.V1MessageAttemptListAttemptedMessages(
appId,
endpointId,
options?.Limit,
options?.Iterator,
options?.Channel,
options?.Tag,
(Svix.Model.MessageStatus?)options?.Status,
options?.Before,
options?.After);
return lResults;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(ListAttemptedMessages)} failed");
if (Throw)
throw;
return new ListResponseEndpointMessageOut();
}
}
public async Task<ListResponseEndpointMessageOut> ListAttemptedMessagesAsync(string appId, string endpointId, MessageAttemptListOptions options = null,
string idempotencyKey = default, CancellationToken cancellationToken = default)
{
try
{
var lResults = await _messageAttemptApi.V1MessageAttemptListAttemptedMessagesAsync(
appId,
endpointId,
options?.Limit,
options?.Iterator,
options?.Channel,
options?.Tag,
(MessageStatus?)options?.Status,
options?.Before,
options?.After,
options?.WithContent,
options?.EventTypes,
cancellationToken);
return lResults;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(ListAttemptedMessagesAsync)} failed");
if (Throw)
throw;
return new ListResponseEndpointMessageOut();
}
}
public ListResponseMessageAttemptOut ListAttemptsByEndpoint(string appId, string endpointId, AttemptsByEndpointListOptions options = null,
string idempotencyKey = default)
{
try
{
var lResults = _messageAttemptApi.V1MessageAttemptListByEndpoint(
appId,
endpointId,
options?.Limit,
options?.Iterator,
(Svix.Model.MessageStatus?)options?.Status,
(Svix.Model.StatusCodeClass?)options?.Code,
options?.Channel,
options?.Tag,
options?.Before,
options?.After,
options?.WithContent,
options?.WithMsg,
options?.EventTypes);
return lResults;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(ListAttemptsByEndpoint)} failed");
if (Throw)
throw;
return new ListResponseMessageAttemptOut();
}
}
public async Task<ListResponseMessageAttemptOut> ListAttemptsByEndpointAsync(string appId, string endpointId, AttemptsByEndpointListOptions options = null,
string idempotencyKey = default, CancellationToken cancellationToken = default)
{
try
{
var lResults = await _messageAttemptApi.V1MessageAttemptListByEndpointAsync(
appId,
endpointId,
options?.Limit,
options?.Iterator,
(Svix.Model.MessageStatus?)options?.Status,
(Svix.Model.StatusCodeClass?)options?.Code,
options?.Channel,
options?.Tag,
options?.Before,
options?.After,
options?.WithContent,
options?.WithMsg,
options?.EventTypes,
cancellationToken);
return lResults;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(ListAttemptsByEndpointAsync)} failed");
if (Throw)
throw;
return new ListResponseMessageAttemptOut();
}
}
/// <summary>
/// Lists the delivery attempts made for one message, with optional filtering.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="options">Optional paging/filter options; may be null.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded to the
/// underlying call — NOTE(review): confirm the generated list endpoint takes no such key.</param>
/// <returns>The attempt page, or an empty <c>ListResponseMessageAttemptOut</c> when the
/// call fails and <c>Throw</c> is false.</returns>
public ListResponseMessageAttemptOut ListAttemptsByMessage(string appId, string messageId, AttemptsByMessageListOptions options = null,
string idempotencyKey = default)
{
    try
    {
        // Delegate straight to the generated client; null options pass null filters.
        return _messageAttemptApi.V1MessageAttemptListByMsg(
            appId,
            messageId,
            options?.Limit,
            options?.Iterator,
            (Svix.Model.MessageStatus?)options?.Status,
            (Svix.Model.StatusCodeClass?)options?.Code,
            options?.Channel,
            options?.Tag,
            options?.EndpointId,
            options?.Before,
            options?.After,
            options?.WithContent,
            options?.EventTypes);
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ListAttemptsByMessage)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageAttemptOut();
    }
}
/// <summary>
/// Asynchronously lists the delivery attempts made for one message, with optional filtering.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="options">Optional paging/filter options; may be null.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded to the
/// underlying call — NOTE(review): confirm the generated list endpoint takes no such key.</param>
/// <param name="cancellationToken">Token used to cancel the HTTP request.</param>
/// <returns>The attempt page, or an empty <c>ListResponseMessageAttemptOut</c> when the
/// call fails and <c>Throw</c> is false.</returns>
public async Task<ListResponseMessageAttemptOut> ListAttemptsByMessageAsync(string appId, string messageId, AttemptsByMessageListOptions options = null,
string idempotencyKey = default, CancellationToken cancellationToken = default)
{
    try
    {
        // Delegate straight to the generated client; null options pass null filters.
        return await _messageAttemptApi.V1MessageAttemptListByMsgAsync(
            appId,
            messageId,
            options?.Limit,
            options?.Iterator,
            (Svix.Model.MessageStatus?)options?.Status,
            (Svix.Model.StatusCodeClass?)options?.Code,
            options?.Channel,
            options?.Tag,
            options?.EndpointId,
            options?.Before,
            options?.After,
            options?.WithContent,
            options?.EventTypes,
            cancellationToken);
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ListAttemptsByMessageAsync)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageAttemptOut();
    }
}
// Deprecated
/// <summary>
/// Lists the attempts of one message against one endpoint via the deprecated
/// list-by-endpoint route. Prefer <c>ListAttemptsByEndpoint</c>.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="endpointId">Endpoint identifier.</param>
/// <param name="options">Optional paging/filter options; Code/WithContent/WithMsg are
/// not forwarded by this deprecated route.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded.</param>
/// <returns>The attempt page, or an empty result when the call fails and <c>Throw</c> is false.</returns>
public ListResponseMessageAttemptEndpointOut ListAttemptsForEndpoint(string appId, string messageId,
string endpointId, AttemptsByEndpointListOptions options = null, string idempotencyKey = default)
{
    try
    {
        return _messageAttemptApi.V1MessageAttemptListByEndpointDeprecated(
            appId,
            messageId,
            endpointId,
            options?.Limit,
            options?.Iterator,
            options?.Channel,
            options?.Tag,
            (Svix.Model.MessageStatus?)options?.Status,
            options?.Before,
            options?.After,
            options?.EventTypes);
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ListAttemptsForEndpoint)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageAttemptEndpointOut();
    }
}
// Deprecated
/// <summary>
/// Asynchronously lists the attempts of one message against one endpoint via the
/// deprecated list-by-endpoint route. Prefer <c>ListAttemptsByEndpointAsync</c>.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="endpointId">Endpoint identifier.</param>
/// <param name="options">Optional paging/filter options; Code/WithContent/WithMsg are
/// not forwarded by this deprecated route.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded.</param>
/// <param name="cancellationToken">Token used to cancel the HTTP request.</param>
/// <returns>The attempt page, or an empty result when the call fails and <c>Throw</c> is false.</returns>
public async Task<ListResponseMessageAttemptEndpointOut> ListAttemptsForEndpointAsync(string appId,
string messageId, string endpointId, AttemptsByEndpointListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default)
{
    try
    {
        var lResults = await _messageAttemptApi.V1MessageAttemptListByEndpointDeprecatedAsync(
            appId,
            messageId,
            endpointId,
            options?.Limit,
            options?.Iterator,
            options?.Channel,
            options?.Tag,
            (Svix.Model.MessageStatus?)options?.Status,
            options?.Before,
            options?.After,
            // EventTypes is already a List<string>; pass it directly like every
            // sibling method does (the previous ?.ToList() was a redundant copy).
            options?.EventTypes,
            cancellationToken);
        return lResults;
    }
    catch (ApiException e)
    {
        Logger?.LogError(e, $"{nameof(ListAttemptsForEndpointAsync)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageAttemptEndpointOut();
    }
}
// Deprecated
/// <summary>
/// Lists a message's delivery attempts via the deprecated list-by-message route.
/// Prefer <c>ListAttemptsByMessage</c>.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="options">Optional paging/filter options; may be null.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded.</param>
/// <returns>The attempt page, or an empty result when the call fails and <c>Throw</c> is false.</returns>
public ListResponseMessageAttemptOut ListAttempts(string appId, string messageId, MessageAttemptListOptions options = null,
string idempotencyKey = default)
{
    try
    {
        return _messageAttemptApi.V1MessageAttemptListByMsgDeprecated(
            appId,
            messageId,
            options?.Limit,
            options?.Iterator,
            options?.EndpointId,
            options?.Channel,
            options?.Tag,
            (Svix.Model.MessageStatus?)options?.Status,
            options?.Before,
            options?.After,
            // Positional argument deliberately left null, matching the original call —
            // NOTE(review): presumably a flag this deprecated route never sets; confirm
            // against the generated client signature.
            null,
            options?.EventTypes);
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ListAttempts)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageAttemptOut();
    }
}
// Deprecated
/// <summary>
/// Asynchronously lists a message's delivery attempts via the deprecated
/// list-by-message route. Prefer <c>ListAttemptsByMessageAsync</c>.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="options">Optional paging/filter options; may be null.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded.</param>
/// <param name="cancellationToken">Token used to cancel the HTTP request.</param>
/// <returns>The attempt page, or an empty result when the call fails and <c>Throw</c> is false.</returns>
public async Task<ListResponseMessageAttemptOut> ListAttemptsAsync(string appId, string messageId, MessageAttemptListOptions options = null,
string idempotencyKey = default, CancellationToken cancellationToken = default)
{
    try
    {
        return await _messageAttemptApi.V1MessageAttemptListByMsgDeprecatedAsync(
            appId,
            messageId,
            options?.Limit,
            options?.Iterator,
            options?.EndpointId,
            options?.Channel,
            options?.Tag,
            (Svix.Model.MessageStatus?)options?.Status,
            options?.Before,
            options?.After,
            // Positional argument deliberately left null, matching the original call —
            // NOTE(review): presumably a flag this deprecated route never sets; confirm
            // against the generated client signature.
            null,
            options?.EventTypes,
            cancellationToken);
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ListAttemptsAsync)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageAttemptOut();
    }
}
/// <summary>
/// Lists the endpoints a message was attempted against.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="options">Optional paging options (Limit/Iterator); may be null.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded.</param>
/// <returns>The endpoint page, or an empty result when the call fails and <c>Throw</c> is false.</returns>
public ListResponseMessageEndpointOut ListAttemptedDestinations(string appId, string messageId, ListOptions options = null,
string idempotencyKey = default)
{
    try
    {
        return _messageAttemptApi.V1MessageAttemptListAttemptedDestinations(
            appId,
            messageId,
            options?.Limit,
            options?.Iterator);
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ListAttemptedDestinations)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageEndpointOut();
    }
}
/// <summary>
/// Asynchronously lists the endpoints a message was attempted against.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="options">Optional paging options (Limit/Iterator); may be null.</param>
/// <param name="idempotencyKey">Accepted for signature consistency; not forwarded.</param>
/// <param name="cancellationToken">Token used to cancel the HTTP request.</param>
/// <returns>The endpoint page, or an empty result when the call fails and <c>Throw</c> is false.</returns>
public async Task<ListResponseMessageEndpointOut> ListAttemptedDestinationsAsync(string appId, string messageId,
ListOptions options = null, string idempotencyKey = default,
CancellationToken cancellationToken = default)
{
    try
    {
        return await _messageAttemptApi.V1MessageAttemptListAttemptedDestinationsAsync(
            appId,
            messageId,
            options?.Limit,
            options?.Iterator,
            cancellationToken);
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ListAttemptedDestinationsAsync)} failed");
        if (Throw)
            throw;
        // Client is configured not to throw: surface an empty page instead.
        return new ListResponseMessageEndpointOut();
    }
}
/// <summary>
/// Requests a resend of a message to one endpoint.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="endpointId">Endpoint identifier.</param>
/// <param name="idempotencyKey">Forwarded to the resend call to deduplicate retries.</param>
/// <returns>True when the server accepts the resend (HTTP 202); false on failure
/// when <c>Throw</c> is false.</returns>
public bool ResendWebhook(string appId, string messageId, string endpointId, string idempotencyKey = default)
{
    try
    {
        var response = _messageAttemptApi.V1MessageAttemptResendWithHttpInfo(
            appId,
            messageId,
            endpointId,
            idempotencyKey);
        // Resend is fire-and-forget server-side; 202 Accepted signals success.
        return response.StatusCode == HttpStatusCode.Accepted;
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ResendWebhook)} failed");
        if (Throw)
            throw;
        return false;
    }
}
/// <summary>
/// Asynchronously requests a resend of a message to one endpoint.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="endpointId">Endpoint identifier.</param>
/// <param name="idempotencyKey">Forwarded to the resend call to deduplicate retries.</param>
/// <param name="cancellationToken">Token used to cancel the HTTP request.</param>
/// <returns>True when the server accepts the resend (HTTP 202); false on failure
/// when <c>Throw</c> is false.</returns>
public async Task<bool> ResendWebhookAsync(string appId, string messageId, string endpointId, string idempotencyKey = default,
CancellationToken cancellationToken = default)
{
    try
    {
        var response = await _messageAttemptApi.V1MessageAttemptResendWithHttpInfoAsync(
            appId,
            messageId,
            endpointId,
            idempotencyKey,
            cancellationToken);
        // Resend is fire-and-forget server-side; 202 Accepted signals success.
        return response.StatusCode == HttpStatusCode.Accepted;
    }
    catch (ApiException ex)
    {
        Logger?.LogError(ex, $"{nameof(ResendWebhookAsync)} failed");
        if (Throw)
            throw;
        return false;
    }
}
/// <summary>
/// Deletes the stored payload of a single message attempt.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="attemptId">Attempt identifier whose content is purged.</param>
/// <param name="idempotencyKey">Accepted but never forwarded to the underlying call —
/// NOTE(review): confirm whether the generated expunge endpoint supports it.</param>
/// <returns>True when the server confirms deletion (HTTP 204); false on failure
/// when <c>Throw</c> is false.</returns>
public bool ExpungeContent(string appId, string messageId, string attemptId, string idempotencyKey = default)
{
try
{
var lResponse = _messageAttemptApi.V1MessageAttemptExpungeContentWithHttpInfo(
appId,
messageId,
attemptId);
// 204 No Content is the success status for the expunge operation.
return lResponse.StatusCode == HttpStatusCode.NoContent;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(ExpungeContent)} failed");
if (Throw)
throw;
return false;
}
}
/// <summary>
/// Asynchronously deletes the stored payload of a single message attempt.
/// </summary>
/// <param name="appId">Application identifier.</param>
/// <param name="messageId">Message identifier.</param>
/// <param name="attemptId">Attempt identifier whose content is purged.</param>
/// <param name="idempotencyKey">Accepted but never forwarded to the underlying call —
/// NOTE(review): confirm whether the generated expunge endpoint supports it.</param>
/// <param name="cancellationToken">Token used to cancel the HTTP request.</param>
/// <returns>True when the server confirms deletion (HTTP 204); false on failure
/// when <c>Throw</c> is false.</returns>
public async Task<bool> ExpungeContentAsync(string appId, string messageId, string attemptId, string idempotencyKey = default, CancellationToken cancellationToken = default)
{
try
{
var lResponse = await _messageAttemptApi.V1MessageAttemptExpungeContentWithHttpInfoAsync(
appId,
messageId,
attemptId,
cancellationToken);
// 204 No Content is the success status for the expunge operation.
return lResponse.StatusCode == HttpStatusCode.NoContent;
}
catch (ApiException e)
{
Logger?.LogError(e, $"{nameof(ExpungeContentAsync)} failed");
if (Throw)
throw;
return false;
}
}
}
}

View File

@@ -0,0 +1,9 @@
// The previous `using Svix.Models;` was redundant: this file already declares
// its types inside the Svix.Models namespace.
namespace Svix.Models
{
    /// <summary>
    /// Options controlling application creation.
    /// </summary>
    public class ApplicationCreateOptions
    {
        /// <summary>
        /// When true, presumably return the existing application rather than failing
        /// if one already exists — NOTE(review): confirm against the Application
        /// create wrapper that consumes this flag.
        /// </summary>
        public bool GetIfExists { get; set; }
    }
}

View File

@@ -0,0 +1,26 @@
using System;
using System.Collections.Generic;
namespace Svix.Models
{
public sealed class AttemptsByEndpointListOptions : ListOptions
{
public int? Status { get; set; }
public int? Code { get; set; }
public List<string> EventTypes { get; set; }
public string? Channel { get; set; }
public DateTime? Before { get; set; }
public DateTime? After { get; set; }
public bool? WithContent { get; set; }
public bool? WithMsg { get; set; }
public string? Tag { get; set; }
}
}

View File

@@ -0,0 +1,26 @@
using System;
using System.Collections.Generic;
namespace Svix.Models
{
public sealed class AttemptsByMessageListOptions : ListOptions
{
public string? EndpointId { get; set; }
public int? Status { get; set; }
public int? Code { get; set; }
public List<string> EventTypes { get; set; }
public string? Channel { get; set; }
public DateTime? Before { get; set; }
public DateTime? After { get; set; }
public bool? WithContent { get; set; }
public string? Tag { get; set; }
}
}

View File

@@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using Svix.Model;
namespace Svix.Models
{
public sealed class BackgroundTaskListOptions : ListOptions
{
public BackgroundTaskStatus? Status { get; set; }
public BackgroundTaskType? Task { get; set; }
}
}

View File

@@ -0,0 +1,12 @@
using System;
using System.Collections.Generic;
namespace Svix.Models
{
public sealed class EndpointStatsOptions
{
public DateTime? Since { get; set; }
public DateTime? Until { get; set; }
}
}

View File

@@ -0,0 +1,9 @@
namespace Svix.Models
{
public sealed class EventTypeListOptions : ListOptions
{
public bool? WithContent { get; set; }
public bool? IncludeArchived { get; set; }
}
}

Some files were not shown because too many files have changed in this diff Show More