added jambonz small deployment

This commit is contained in:
Dave Horton
2020-03-19 13:50:14 -04:00
parent ac5e437541
commit 619a01cacf
31 changed files with 1240 additions and 32 deletions

View File

@@ -1,19 +0,0 @@
#!/bin/bash
# Install and configure the Datadog agent.
# Args: $1 = Datadog API key, $2 = intake endpoint URL, $3 = environment-name tag
DATADOG_API_KEY=$1
DATADOG_ENDPOINT=$2
DATADOG_ENV_NAME=$3
# install datadog (install only -- do not start the agent during the image build)
DD_INSTALL_ONLY=true DD_API_KEY=${DATADOG_API_KEY} bash -c "$(curl -L https://raw.githubusercontent.com/DataDog/datadog-agent/master/cmd/agent/install_script.sh)"
# remove the stock integration configs shipped with the agent
sudo rm /etc/datadog-agent/conf.d/*
# point the agent at the requested intake endpoint and quiet the agent log
sudo sed -i -e 's@^# dd_url:.*@'"dd_url: $DATADOG_ENDPOINT"'@g' /etc/datadog-agent/datadog.yaml
sudo sed -i -e 's@^# log_level:.*@log_level: warning@g' /etc/datadog-agent/datadog.yaml
# tags must be a YAML list in datadog.yaml -- each entry needs a leading "- "
sudo tee -a /etc/datadog-agent/datadog.yaml > /dev/null <<EOT
tags:
  - env_name:$DATADOG_ENV_NAME
  - role:sbc-rtp
EOT
# sudo added for consistency with the privileged commands above
sudo systemctl enable datadog-agent

View File

@@ -1,12 +0,0 @@
#!/bin/bash
# Install fluent-bit (td-agent-bit) and point its output at the fluentd
# endpoint supplied as $1.
FLUENTD_ENDPOINT=$1
wget -qO - https://packages.fluentbit.io/fluentbit.key | sudo apt-key add -
echo "deb https://packages.fluentbit.io/debian/stretch stretch main" | sudo tee -a /etc/apt/sources.list
sudo apt-get update
# -y: this runs unattended inside an image build, so never prompt
sudo apt-get install -y td-agent-bit
# td-agent-bit.conf was uploaded to /tmp by the packer file provisioner
sudo mv /tmp/td-agent-bit.conf /etc/td-agent-bit
# rewrite the "Host" line(s) in the config to point at the fluentd endpoint
sudo sed -i -e 's!\(^.*Host\).*!'"\1 $FLUENTD_ENDPOINT"'!g' /etc/td-agent-bit/td-agent-bit.conf
# sudo added for consistency with the privileged commands above
sudo systemctl daemon-reload
sudo systemctl enable td-agent-bit

View File

@@ -0,0 +1,46 @@
# packer-jambonz-sbc-sip-rtp
A [packer](https://www.packer.io/) template to build an AMI containing everything needed to run the SBC functionality of jambonz.
## Installing
```
$ packer build -color=false template.json
```
### variables
There are many variables that can be specified on the `packer build` command line; however defaults (which are shown below) are appropriate for building an "all in one" jambonz server, so you generally should not need to specify values.
```
"region": "us-east-1"
```
The region to create the AMI in
```
"ami_description": "jambonz SBC SIP+RTP"
```
AMI description.
```
"instance_type": "t2.medium"
```
EC2 Instance type to use when building the AMI.
```
"drachtio_version": "v0.8.4"
```
drachtio tag or branch to build
```
"rtp_engine_version": "mr7.4.1.5"
```
rtpengine version
```
"rtp_engine_min_port": "40000",
"rtp_engine_max_port": "60000"
```
rtp port range for rtpengine

View File

@@ -0,0 +1,6 @@
# Rotate the apiban client log daily, keeping 7 compressed rotations.
# copytruncate lets the running client keep its open file handle.
/var/log/apiban-client.log {
daily
copytruncate
rotate 7
compress
}

View File

@@ -0,0 +1,5 @@
{
"APIKEY":"12c2a03e1ec7c467c07ce30cab621734",
"LKID":"0",
"VERSION":"0.3"
}

View File

@@ -0,0 +1,18 @@
# Fail2Ban filter for drachtio spammer detection
#
# Matches "detected potential spammer" lines in the drachtio log so that
# fail2ban can ban the offending source address.
[INCLUDES]
# Read common prefixes. If any customizations available -- read them from
# common.local
before = common.conf
[Definition]
# daemon name referenced by the common.conf prefix expressions
_daemon = drachtio
__pid_re = (?:\[\d+\])
# <HOST> is fail2ban's capture placeholder for the source IP to ban
failregex = detected potential spammer from <HOST>:\d+
ignoreregex =

View File

@@ -0,0 +1,35 @@
<!-- drachtio SIP server configuration -->
<drachtio>
<!-- udp port to listen on for client connections and shared secret used to authenticate clients -->
<admin port="9022" secret="cymru">127.0.0.1</admin>
<!-- route incoming INVITEs to the local sbc-call-router http service -->
<request-handlers>
<request-handler sip-method="INVITE">http://127.0.0.1:4000</request-handler>
</request-handlers>
<sip>
<!-- contacts left empty: the systemd unit supplies contact(s) via command-line flags -->
<contacts>
</contacts>
<udp-mtu>4096</udp-mtu>
</sip>
<cdrs>false</cdrs>
<logging>
<file>
<name>/var/log/drachtio/drachtio.log</name>
<archive>/var/log/drachtio/archive</archive>
<!-- NOTE(review): size/maxSize units assumed to be MB; confirm against drachtio docs -->
<size>100</size>
<maxSize>10000</maxSize>
<auto-flush>true</auto-flush>
</file>
<sofia-loglevel>3</sofia-loglevel>
<loglevel>info</loglevel>
</logging>
</drachtio>

View File

@@ -0,0 +1,32 @@
[Unit]
Description=drachtio
After=syslog.target network.target local-fs.target
[Service]
; service
Type=forking
; fetch this instance's private/public IPs from the EC2 metadata service
; NOTE(review): IMDSv1-style request -- needs a token step if IMDSv2 is enforced
ExecStartPre=/bin/sh -c 'systemctl set-environment LOCAL_IP=`curl -s http://169.254.169.254/latest/meta-data/local-ipv4`'
ExecStartPre=/bin/sh -c 'systemctl set-environment PUBLIC_IP=`curl -s http://169.254.169.254/latest/meta-data/public-ipv4`'
; listen for SIP on udp and tcp at the private IP, advertising the public IP;
; port 9022 is the admin port clients authenticate to (see drachtio.conf.xml)
ExecStart=/usr/local/bin/drachtio --daemon --contact sip:${LOCAL_IP};transport=udp --external-ip ${PUBLIC_IP} \
--contact sip:${LOCAL_IP};transport=tcp \
--address 0.0.0.0 --port 9022
TimeoutSec=15s
Restart=always
; exec
User=root
Group=daemon
LimitCORE=infinity
LimitNOFILE=100000
LimitNPROC=60000
;LimitSTACK=240
LimitRTPRIO=infinity
LimitRTTIME=7000000
IOSchedulingClass=realtime
IOSchedulingPriority=2
CPUSchedulingPolicy=rr
CPUSchedulingPriority=89
UMask=0007
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,139 @@
// pm2 ecosystem file for the SBC SIP+RTP host: runs the api server, call
// router, registrar, and the outbound/inbound SBC apps as forked processes.
// NOTE(review): the '$${...}' values below look like escaped template
// placeholders; packer's file provisioner copies this file verbatim, so they
// reach the instance literally -- confirm where/how they are substituted.
module.exports = {
apps : [
{
// REST API server for provisioning/call control
name: 'jambonz-api-server',
cwd: '/home/admin/apps/jambonz-api-server',
script: 'app.js',
out_file: '/home/admin/.pm2/logs/jambonz-api-server.log',
err_file: '/home/admin/.pm2/logs/jambonz-api-server.log',
combine_logs: true,
instance_var: 'INSTANCE_ID',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
JAMBONES_MYSQL_HOST: '$${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '$${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '$${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '$${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
JAMBONES_LOGLEVEL: 'info',
// NOTE(review): 'JAMBONE_' (no S) -- presumably the env name the app reads; verify
JAMBONE_API_VERSION: 'v1',
JAMBONES_CREATE_CALL_URL: 'http://127.0.0.1:4001/v1/createCall',
HTTP_PORT: 3000
},
},
{
// routes incoming calls to the inbound/outbound handlers below
name: 'sbc-call-router',
cwd: '/home/admin/apps/sbc-call-router',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-call-router.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-call-router.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
HTTP_PORT: 4000,
JAMBONES_INBOUND_ROUTE: '127.0.0.1:4002',
JAMBONES_OUTBOUND_ROUTE: '127.0.0.1:4003',
JAMBONZ_TAGGED_INBOUND: 1,
// NOTE(review): hard-coded subnet CIDR -- must match the deployed VPC subnet
JAMBONES_NETWORK_CIDR: '172.31.32.0/24'
}
},
{
// handles SIP REGISTER traffic from devices
name: 'sbc-registrar',
cwd: '/home/admin/apps/sbc-registrar',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-registrar.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-registrar.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
ENABLE_DATADOG_METRICS: 0,
JAMBONES_LOGLEVEL: 'debug',
DRACHTIO_HOST: '127.0.0.1',
DRACHTIO_PORT: 9022,
DRACHTIO_SECRET: 'cymru',
JAMBONES_MYSQL_HOST: '$${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '$${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '$${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '$${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
}
},
{
// handles calls leaving the platform toward carriers
name: 'sbc-outbound',
cwd: '/home/admin/apps/sbc-outbound',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-outbound.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-outbound.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
JAMBONES_LOGLEVEL: 'debug',
ENABLE_DATADOG_METRICS: 0,
DRACHTIO_HOST: '127.0.0.1',
DRACHTIO_PORT: 9022,
DRACHTIO_SECRET: 'cymru',
JAMBONES_RTPENGINES: '127.0.0.1:22222',
JAMBONES_MYSQL_HOST: '$${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '$${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '$${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '$${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
}
},
{
// handles calls arriving from carriers
name: 'sbc-inbound',
cwd: '/home/admin/apps/sbc-inbound',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-inbound.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-inbound.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
JAMBONES_LOGLEVEL: 'debug',
ENABLE_DATADOG_METRICS: 0,
DRACHTIO_HOST: '127.0.0.1',
DRACHTIO_PORT: 9022,
DRACHTIO_SECRET: 'cymru',
// NOTE(review): hard-coded feature server address -- must match the deployment
JAMBONES_FEATURE_SERVERS: '172.31.3.33:5070',
JAMBONES_RTPENGINES: '127.0.0.1:22222',
JAMBONES_MYSQL_HOST: '$${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '$${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '$${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '$${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
}
}]
};

View File

@@ -0,0 +1,42 @@
[Unit]
Description=rtpengine
After=syslog.target network.target local-fs.target
[Service]
; service
Type=forking
; fetch this instance's private/public IPs from the EC2 metadata service
; NOTE(review): IMDSv1-style request -- needs a token step if IMDSv2 is enforced
ExecStartPre=/bin/sh -c 'systemctl set-environment LOCAL_IP=`curl -s http://169.254.169.254/latest/meta-data/local-ipv4`'
ExecStartPre=/bin/sh -c 'systemctl set-environment PUBLIC_IP=`curl -s http://169.254.169.254/latest/meta-data/public-ipv4`'
; ng control protocol on 22222, cli on localhost:9900;
; NOTE(review): port-min/port-max are hard-coded here even though the packer
; template declares rtp_engine_min_port/rtp_engine_max_port variables -- confirm
ExecStart=/usr/local/bin/rtpengine --interface ${LOCAL_IP}!${PUBLIC_IP} \
--listen-ng=22222 \
--listen-udp=12222 \
--listen-cli=127.0.0.1:9900 \
--pidfile /var/run/rtpengine.pid \
--port-min 40000 \
--port-max 60000 \
--recording-dir /tmp \
--recording-method pcap \
--recording-format eth \
--log-level 5 \
--delete-delay 0
PIDFile=/var/run/rtpengine.pid
TimeoutSec=15s
Restart=always
; exec
User=root
Group=daemon
LimitCORE=infinity
LimitNOFILE=100000
LimitNPROC=60000
;LimitSTACK=240
LimitRTPRIO=infinity
LimitRTTIME=7000000
IOSchedulingClass=realtime
IOSchedulingPriority=2
CPUSchedulingPolicy=rr
CPUSchedulingPriority=89
UMask=0007
[Install]
WantedBy=multi-user.target

View File

@@ -0,0 +1,5 @@
" Load the packaged Debian defaults first, then apply local overrides below.
source /usr/share/vim/vim80/defaults.vim
" Tell vim not to source defaults.vim again on top of this file.
let skip_defaults_vim = 1
" Restrict mouse handling to replace mode -- presumably so terminal
" copy/paste keeps working; adjust if full mouse support is wanted.
if has('mouse')
set mouse=r
endif

View File

@@ -0,0 +1,12 @@
#!/bin/bash
# Install the apiban iptables client (blocks known SIP attackers) and
# schedule it to refresh the banned-IP list every 4 minutes via cron.
cd /usr/local/src/
git clone https://github.com/palner/apiban.git
# -p: do not fail if the directory already exists (e.g. on a rebuild)
sudo mkdir -p /usr/local/bin/apiban && sudo chmod 0755 /usr/local/bin/apiban
sudo cp -r /usr/local/src/apiban/clients/go/apiban-iptables-client /usr/local/bin/apiban && sudo chmod +x /usr/local/bin/apiban/apiban-iptables-client
# config.json and the logrotate stanza were uploaded to /tmp by the packer file provisioner
sudo cp /tmp/config.json /usr/local/bin/apiban/config.json
sudo chmod 0644 /usr/local/bin/apiban/config.json
sudo cp /tmp/apiban.logrotate /etc/logrotate.d/apiban-client
sudo chmod 0644 /etc/logrotate.d/apiban-client
# run once now so the image ships with a current ban list
sudo /usr/local/bin/apiban/apiban-iptables-client
echo "*/4 * * * * root /usr/local/bin/apiban/apiban-iptables-client >/dev/null 2>&1" | sudo tee -a /etc/crontab

View File

@@ -0,0 +1,22 @@
#!/bin/bash
# Clone the jambonz node applications under /home/admin/apps, install their
# dependencies, and arrange for pm2 to start them on every reboot.
cd /home/admin
mkdir apps
# ecosystem.config.js was uploaded to /tmp by the packer file provisioner
cp /tmp/ecosystem.config.js apps
cd apps
git clone https://github.com/jambonz/sbc-outbound.git
git clone https://github.com/jambonz/sbc-inbound.git
git clone https://github.com/jambonz/sbc-registrar.git
# NOTE(review): sbc-api-server is cloned but never npm-installed below and is
# not referenced in ecosystem.config.js -- confirm whether it is still needed
git clone https://github.com/jambonz/sbc-api-server.git
git clone https://github.com/jambonz/sbc-call-router.git
git clone https://github.com/jambonz/jambonz-api-server.git
cd /home/admin/apps/sbc-inbound && npm install
cd /home/admin/apps/sbc-outbound && npm install
cd /home/admin/apps/sbc-registrar && npm install
cd /home/admin/apps/sbc-call-router && npm install
cd /home/admin/apps/jambonz-api-server && npm install
# add entries to /etc/crontab to start everything on reboot
echo "@reboot admin /usr/bin/pm2 start /home/admin/apps/ecosystem.config.js" | sudo tee -a /etc/crontab
echo "@reboot admin sudo env PATH=$PATH:/usr/bin pm2 logrotate -u admin" | sudo tee -a /etc/crontab

View File

@@ -0,0 +1,5 @@
#!/bin/bash
# Install chrony for NTP time synchronization and enable it at boot.
sudo apt-get update
sudo apt-get -y install chrony
sudo systemctl enable chrony.service

View File

@@ -0,0 +1,19 @@
#!/bin/bash
# Build and install the drachtio SIP server from source.
# Arg: $1 = git tag or branch of drachtio-server to build
# Fail the packer build immediately if any step fails, rather than
# continuing and masking the error with the later mv/chmod commands.
set -e
VERSION=$1
echo "drachtio version to install is ${VERSION}"
chmod 0777 /usr/local/src
cd /usr/local/src
git clone https://github.com/davehorton/drachtio-server.git -b ${VERSION}
cd drachtio-server
git submodule update --init --recursive
# $_ expands to the last argument of the previous command (the build dir)
./autogen.sh && mkdir -p build && cd $_ && ../configure CPPFLAGS='-DNDEBUG' && make && sudo make install
# config, unit file and vimrc were uploaded to /tmp by the packer file provisioner
sudo mv /tmp/drachtio.conf.xml /etc
sudo mv /tmp/drachtio.service /etc/systemd/system
sudo mv /tmp/vimrc.local /etc/vim/vimrc.local
sudo chmod 644 /etc/drachtio.conf.xml
sudo chmod 644 /etc/systemd/system/drachtio.service
sudo chmod 644 /etc/vim/vimrc.local
sudo chown root:root /etc/drachtio.conf.xml /etc/vim/vimrc.local
sudo systemctl enable drachtio

View File

@@ -0,0 +1,28 @@
#!/bin/bash
# Configure fail2ban jails (tcp and udp SIP on 5060) that ban sources flagged
# as spammers in the drachtio log; the matching filter is installed below.
sudo cp /etc/fail2ban/jail.conf /etc/fail2ban/jail.local
# append the two drachtio jails; maxretry=1 bans on first match, for 24h
sudo bash -c "cat >> /etc/fail2ban/jail.local" << EOF
[drachtio-tcp]
maxretry = 1
bantime = 86400
enabled = true
filter = drachtio
port = 5060
protocol = tcp
logpath = /var/log/drachtio/drachtio.log
[drachtio-udp]
maxretry = 1
bantime = 86400
enabled = true
filter = drachtio
port = 5060
protocol = udp
logpath = /var/log/drachtio/drachtio.log
EOF
# the filter regex was uploaded to /tmp by the packer file provisioner
sudo cp /tmp/drachtio-fail2ban.conf /etc/fail2ban/filter.d/drachtio.conf
sudo chmod 0644 /etc/fail2ban/filter.d/drachtio.conf

View File

@@ -0,0 +1,4 @@
#!/bin/bash
# Install Node.js 10.x from nodesource plus the pm2 process manager tooling.
curl -sL https://deb.nodesource.com/setup_10.x | sudo bash - && sudo apt-get install -y nodejs
sudo npm install -g pino-pretty pm2 pm2-logrotate
# NOTE(review): this registers pm2-logrotate with root's pm2, but the apps are
# started as the admin user via /etc/crontab -- confirm which pm2 daemon needs it
sudo pm2 install pm2-logrotate

View File

@@ -0,0 +1,17 @@
#!/bin/bash
# Build and install rtpengine (with transcoding support) and its G.729 codec.
# Arg: $1 = rtpengine git tag or branch to build
VERSION=$1
echo "rtpengine version to install is ${VERSION}"
cd /usr/local/src
# bcg729 supplies the G.729 codec used by rtpengine transcoding
git clone https://github.com/BelledonneCommunications/bcg729.git
cd bcg729
# (dropped stray "chdir=..." argument -- an Ansible-ism passed to make by mistake)
cmake . -DCMAKE_INSTALL_PREFIX=/usr && make && sudo make install
cd /usr/local/src
git clone https://github.com/sipwise/rtpengine.git -b ${VERSION}
cd rtpengine/daemon
make with_transcoding=yes
# /usr/local/bin is root-owned, so the copy needs sudo
sudo cp /usr/local/src/rtpengine/daemon/rtpengine /usr/local/bin
# unit file was uploaded to /tmp by the packer file provisioner
sudo mv /tmp/rtpengine.service /etc/systemd/system
sudo chmod 644 /etc/systemd/system/rtpengine.service
sudo systemctl enable rtpengine

View File

@@ -0,0 +1,98 @@
{
"variables": {
"region": "us-east-1",
"ssh_username": "admin",
"ami_description": "jambonz SBC SIP+RTP",
"drachtio_version": "v0.8.4",
"instance_type": "t2.medium",
"rtp_engine_version": "mr7.4.1.5",
"rtp_engine_min_port": "40000",
"rtp_engine_max_port": "60000"
},
"builders": [{
"type": "amazon-ebs",
"region": "{{user `region`}}",
"source_ami_filter": {
"filters": {
"virtualization-type": "hvm",
"name": "debian-stretch-hvm-x86_64-gp2-*",
"root-device-type": "ebs"
},
"owners": ["379101102735"],
"most_recent": true
},
"instance_type": "{{user `instance_type`}}",
"ssh_username": "{{user `ssh_username`}}",
"ami_name": "jambonz-sbc-sip-rtp-{{isotime |clean_resource_name }}",
"ami_description": "{{user `ami_description`}}",
"launch_block_device_mappings": [
{
"device_name": "xvda",
"volume_size": 40,
"volume_type": "gp2",
"delete_on_termination": true
}
],
"security_group_filter": {
"filters": {
"tag:Class": "packer"
}
},
"tags": {
"Name": "jambonz-sbc-sip"
},
"run_tags": {
"Name": "jambonz-sbc-sip"
}
}],
"provisioners": [
{
"type": "shell",
"inline": [
"while [ ! -f /var/lib/cloud/instance/boot-finished ]; do echo 'Waiting for cloud-init...'; sleep 1; done",
"sudo apt-get update",
"sudo apt-get -y install python gcc g++ make cmake build-essential git autoconf automake mysql-client redis-tools \\",
"curl telnet libtool libtool-bin libssl-dev libcurl4-openssl-dev libz-dev systemd-coredump liblz4-tool \\",
"iptables-dev libavformat-dev liblua5.1-0-dev libavfilter-dev libavcodec-dev libswresample-dev \\",
"libevent-dev libpcap-dev libxmlrpc-core-c3-dev markdown libjson-glib-dev lsb-release \\",
"libhiredis-dev gperf libspandsp-dev default-libmysqlclient-dev htop dnsutils gdb \\",
"gnupg2 wget pkg-config ca-certificates libjpeg-dev libsqlite3-dev libpcre3-dev libldns-dev \\",
"libspeex-dev libspeexdsp-dev libedit-dev libtiff-dev yasm valgrind libswscale-dev haveged \\",
"libopus-dev libsndfile-dev libshout3-dev libmpg123-dev libmp3lame-dev libopusfile-dev fail2ban",
"sudo chmod a+w /usr/local/src"
]
},
{
"type": "file",
"source": "files/",
"destination": "/tmp"
},
{
"type": "shell",
"script": "scripts/install_chrony.sh"
},
{
"type": "shell",
"execute_command": "chmod +x {{ .Path }}; sudo '{{ .Path }}' {{user `rtp_engine_version`}}",
"script": "scripts/install_rtpengine.sh"
},
{
"type": "shell",
"script": "scripts/install_nodejs.sh"
},
{
"type": "shell",
"execute_command": "chmod +x {{ .Path }}; sudo '{{ .Path }}' {{user `drachtio_version`}}",
"script": "scripts/install_drachtio.sh"
},
{
"type": "shell",
"script": "scripts/install_app.sh"
},
{
"type": "shell",
"script": "scripts/install_fail2ban.sh"
}
]
}

View File

View File

@@ -0,0 +1,53 @@
# terraform for a jambonz "small" system
This terraform configuration generates a jambonz deployment consisting of two AMIs:
- an SBC server (SIP + RTP)
- a feature server
Each instance is assigned an elastic IP.
It also creates an Elasticache redis instance and an Aurora serverless mysql database, along with the necessary security groups.
## Before running the terraform script
There are several changes you will need to make before running the script.
1. This script creates a VPC in the us-east-1 region. You may prefer to run in a different region: if so, edit the variables.tf file accordingly.
2. If you _do_ want to run in a different region, you need to make sure the AMIs that the terraform script deploys are available in your preferred region. That means either you run the packer scripts yourself and create the AMIs, or you contact me and ask me to copy the AMIs into your preferred region. If you create the AMIs yourself you will need to change the "owner" attribute in the ami filter in jambonz.tf to your own aws id.
3. You will need to download a json service key file from google cloud in order to use the speech services. Copy that file into the credentials folder in this project with the name gcp.json before you run terraform, since that is where the terraform script expects to find it and what it expects to be named.
4. Also create an AWS access key id and secret access key in order to use AWS polly. Either provide these in the variables.tf file or override on the command line.
In general, feel free to customize the terraform scripts to your needs. They are documented and fairly self-explanatory.
## Running the terraform script
Please review and edit the [variables.tf](./variables.tf) file as appropriate, given the suggestions above.
Then install the dependencies:
```
terraform init
```
If you've made changes to the script, test it out:
```
terraform plan
```
When you are ready to run it, do terraform apply, optionally passing any command-line arguments that you want to override variables.tf:
```
terraform apply -var='key_name=aws-dhorton-key' \
-var 'aws_access_key_id_runtime=KASYJH6IPHQPOLMVLWID' \
-var 'aws_secret_access_key_runtime=WkjfaufgzHSHDYKQ+/+1tMPO4/DM9ADWO+asdfasdf'
```
(Note: those are not valid keys of course, just for explanatory purposes).
If you want to destroy the resources created, then:
```
terraform destroy
```

View File

@@ -0,0 +1,53 @@
# create a subnet group for aurora mysql
# (spans the public subnets created in main.tf, via local.my_subnet_ids)
resource "aws_db_subnet_group" "jambonz" {
name = "jambonz-mysql-subnet"
subnet_ids = local.my_subnet_ids
}
# create aurora database (serverless mysql-compatible cluster)
resource "aws_rds_cluster" "jambonz" {
cluster_identifier = "aurora-cluster-jambonz"
engine = "aurora"
engine_version = "5.6.10a"
engine_mode = "serverless"
vpc_security_group_ids = [aws_security_group.allow_mysql.id]
db_subnet_group_name = aws_db_subnet_group.jambonz.name
database_name = "jambones"
master_username = "admin"
# NOTE(review): hard-coded master password (also embedded in jambonz.tf's
# seed commands) -- consider moving to a variable or secrets store
master_password = "JambonzR0ck$"
skip_final_snapshot = true
backup_retention_period = 5
preferred_backup_window = "07:00-09:00"
# serverless scaling: pause the cluster after 5 idle minutes
scaling_configuration {
auto_pause = true
min_capacity = 1
max_capacity = 2
seconds_until_auto_pause = 300
}
}
# create a subnet group for redis elasticache
# (spans the public subnets created in main.tf, via local.my_subnet_ids)
resource "aws_elasticache_subnet_group" "jambonz" {
name = "jambonz-cache-subnet"
subnet_ids = local.my_subnet_ids
}
# create redis cluster (single cache.t2.micro node)
resource "aws_elasticache_cluster" "jambonz" {
cluster_id = "jambonz"
engine = "redis"
node_type = "cache.t2.micro"
num_cache_nodes = 1
# reference the subnet group resource (same name as before) so terraform also
# infers the dependency, instead of repeating the literal string
subnet_group_name = aws_elasticache_subnet_group.jambonz.name
security_group_ids = [aws_security_group.allow_redis.id]
parameter_group_name = "default.redis5.0"
engine_version = "5.0.6"
port = 6379
tags = {
Name = "jambonz"
}
depends_on = [aws_elasticache_subnet_group.jambonz]
}

View File

@@ -0,0 +1,44 @@
#!/bin/bash
# Write the pm2 ecosystem file for the jambonz feature server.
# This file is rendered by terraform's templatefile() (see jambonz.tf), which
# substitutes every ${...} reference below before it runs as EC2 user_data.
cat << EOF > /home/admin/apps/ecosystem.config.js
module.exports = {
apps : [
{
name: 'jambonz-feature-server',
cwd: '/home/admin/apps/jambonz-feature-server',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-feature-server.log',
err_file: '/home/admin/.pm2/logs/jambonz-feature-server.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
GOOGLE_APPLICATION_CREDENTIALS: '/home/admin/credentials/gcp.json',
AWS_ACCESS_KEY_ID: '${AWS_ACCESS_KEY_ID}',
AWS_SECRET_ACCESS_KEY: '${AWS_SECRET_ACCESS_KEY}',
AWS_REGION: '${AWS_REGION}',
ENABLE_DATADOG_METRICS: 0,
JAMBONES_NETWORK_CIDR: '${VPC_CIDR}',
JAMBONES_MYSQL_HOST: '${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
JAMBONES_LOGLEVEL: 'debug',
HTTP_PORT: 3000,
DRACHTIO_HOST: '127.0.0.1',
DRACHTIO_PORT: 9022,
DRACHTIO_SECRET: 'cymru',
JAMBONES_SBCS: '${JAMBONES_SBC_SIP_IPS}',
JAMBONES_FEATURE_SERVERS: '127.0.0.1:9022:cymru',
JAMBONES_FREESWITCH: '127.0.0.1:8021:JambonzR0ck$'
}
}]
};
EOF

View File

@@ -0,0 +1,120 @@
# Create feature server instance
# select the most recent jambonz feature-server AMI
data "aws_ami" "jambonz-feature-server" {
most_recent = true
name_regex = "^jambonz-feature-server"
# NOTE(review): AMI owner account id is hard-coded; change it if you build
# the AMIs yourself (see README)
owners = ["376029039784"]
}
# create an elastic IP per feature server and copy google credentials into place
resource "aws_eip" "jambonz-feature-server" {
count = length(var.jambonz_feature_server_private_ips)
instance = aws_instance.jambonz-feature-server[count.index].id
vpc = true
# copy user-provided google application credentials file; runs after the EIP
# is associated, so the instance is reachable at self.public_ip
provisioner "file" {
source = "credentials/"
destination = "/home/admin/credentials"
connection {
type = "ssh"
user = "admin"
host = self.public_ip
}
}
}
# create the jambonz feature server instance(s), one per configured private IP
resource "aws_instance" "jambonz-feature-server" {
count = length(var.jambonz_feature_server_private_ips)
ami = data.aws_ami.jambonz-feature-server.id
instance_type = var.ec2_instance_type
private_ip = var.jambonz_feature_server_private_ips[count.index]
subnet_id = local.my_subnet_ids[count.index]
vpc_security_group_ids = [aws_security_group.allow_jambonz_feature_server.id]
# user_data renders the pm2 ecosystem file with db/redis endpoints and AWS creds
user_data = templatefile("${path.module}/feature-server.ecosystem.config.js.tmpl", {
VPC_CIDR = var.vpc_cidr_block
JAMBONES_SBC_SIP_IPS = join(",", var.jambonz_sbc_sip_rtp_private_ips)
JAMBONES_MYSQL_HOST = aws_rds_cluster.jambonz.endpoint
JAMBONES_MYSQL_USER = aws_rds_cluster.jambonz.master_username
JAMBONES_MYSQL_PASSWORD = aws_rds_cluster.jambonz.master_password
JAMBONES_REDIS_HOST = aws_elasticache_cluster.jambonz.cache_nodes.0.address
AWS_ACCESS_KEY_ID = var.aws_access_key_id_runtime
AWS_SECRET_ACCESS_KEY = var.aws_secret_access_key_runtime
AWS_REGION = var.region
})
key_name = var.key_name
monitoring = true
depends_on = [aws_internet_gateway.jambonz, aws_elasticache_cluster.jambonz, aws_rds_cluster.jambonz]
tags = {
Name = "jambonz-feature-server"
}
}
# Create SBC SIP+RTP instance
# select the most recent jambonz SBC AMI (same hard-coded owner note as above)
data "aws_ami" "jambonz-sbc-sip-rtp" {
most_recent = true
name_regex = "^jambonz-sbc-sip-rtp"
owners = ["376029039784"]
}
resource "aws_eip" "jambonz-sbc-sip-rtp" {
count = length(var.jambonz_sbc_sip_rtp_private_ips)
instance = aws_instance.jambonz-sbc-sip-rtp-server[count.index].id
vpc = true
}
resource "aws_instance" "jambonz-sbc-sip-rtp-server" {
count = length(var.jambonz_sbc_sip_rtp_private_ips)
ami = data.aws_ami.jambonz-sbc-sip-rtp.id
instance_type = var.ec2_instance_type
private_ip = var.jambonz_sbc_sip_rtp_private_ips[count.index]
subnet_id = local.my_subnet_ids[count.index]
vpc_security_group_ids = [aws_security_group.allow_jambonz_sbc_sip_rtp.id]
user_data = templatefile("${path.module}/sbc-sip-rtp-server.ecosystem.config.js.tmpl", {
VPC_CIDR = var.vpc_cidr_block
JAMBONES_FEATURE_SERVER_FOR_API_CALLS = var.jambonz_feature_server_private_ips[0]
JAMBONES_FEATURE_SERVER_IPS = join(",", var.jambonz_feature_server_private_ips)
JAMBONES_SBC_SIP_IPS = join(",", var.jambonz_sbc_sip_rtp_private_ips)
JAMBONES_RTPENGINE_IPS = join(",", local.rtpengine_hostports)
JAMBONES_MYSQL_HOST = aws_rds_cluster.jambonz.endpoint
JAMBONES_MYSQL_USER = aws_rds_cluster.jambonz.master_username
JAMBONES_MYSQL_PASSWORD = aws_rds_cluster.jambonz.master_password
JAMBONES_REDIS_HOST = aws_elasticache_cluster.jambonz.cache_nodes.0.address
})
key_name = var.key_name
monitoring = true
depends_on = [aws_internet_gateway.jambonz, aws_elasticache_cluster.jambonz, aws_rds_cluster.jambonz]
tags = {
Name = "jambonz-sbc-sip-rtp-server"
}
}
# seed the database, from one of the SBC servers (which has mysql-client
# installed and holds the jambonz-api-server checkout with the seed SQL)
resource "null_resource" "seed" {
# Bootstrap script can run on any instance of the cluster
# So we just choose the first in this case
connection {
type = "ssh"
user = "admin"
host = element(aws_eip.jambonz-sbc-sip-rtp.*.public_ip, 0)
}
# NOTE(review): db password appears on the mysql command line (visible in
# process listings on the instance) -- consider MYSQL_PWD or a defaults file
provisioner "remote-exec" {
inline = [
"mysql -h ${aws_rds_cluster.jambonz.endpoint} -u admin -D jambones -pJambonzR0ck$ < /home/admin/apps/jambonz-api-server/db/jambones-sql.sql",
"mysql -h ${aws_rds_cluster.jambonz.endpoint} -u admin -D jambones -pJambonzR0ck$ < /home/admin/apps/jambonz-api-server/db/create-admin-token.sql",
]
}
depends_on = [aws_rds_cluster.jambonz, aws_instance.jambonz-sbc-sip-rtp-server]
}

View File

@@ -0,0 +1,250 @@
provider "aws" {
profile = "default"
region = var.region
}
# create a VPC (CIDR from var.vpc_cidr_block, default 172.31.0.0/16)
resource "aws_vpc" "jambonz" {
cidr_block = var.vpc_cidr_block
tags = {
Name = "jambonz"
}
}
# add an internet gateway to the VPC so instances can reach the internet
resource "aws_internet_gateway" "jambonz" {
vpc_id = aws_vpc.jambonz.id
tags = {
Name = "jambonz"
}
}
# add a route to the default route table
# to route non-local traffic via the internet gateway
resource "aws_default_route_table" "jambonz" {
default_route_table_id = aws_vpc.jambonz.default_route_table_id
route {
cidr_block = "0.0.0.0/0"
gateway_id = aws_internet_gateway.jambonz.id
}
tags = {
Name = "jambonz default route table"
}
}
# create a public subnet per entry in var.public_subnets (AZ => CIDR map)
resource "aws_subnet" "jambonz" {
for_each = var.public_subnets
vpc_id = aws_vpc.jambonz.id
availability_zone = each.key
cidr_block = each.value
tags = {
Name = "jambonz"
}
}
# for ease of reference later on, create a list of public subnet ids,
# plus the rtpengine host:port endpoints (ng protocol on 22222) per SBC
locals {
my_subnet_ids = [for v in aws_subnet.jambonz : v.id]
rtpengine_hostports = [for ip in var.jambonz_sbc_sip_rtp_private_ips : "${ip}:22222"]
}
# create a security group that allows any server in the VPC to access redis
resource "aws_security_group" "allow_redis" {
name = "allow_redis"
description = "Allow redis connections"
vpc_id = aws_vpc.jambonz.id
ingress {
description = "redis from VPC"
from_port = 6379
to_port = 6379
protocol = "tcp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
# unrestricted egress
egress {
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]
}
tags = {
Name = "allow_redis"
}
}
# create a security group that allows any server in the VPC to access aurora
resource "aws_security_group" "allow_mysql" {
name = "allow_mysql"
# fixed typo: "mysl" -> "mysql"
description = "Allow mysql connections"
vpc_id = aws_vpc.jambonz.id
ingress {
description = "mysql from VPC"
from_port = 3306
to_port = 3306
protocol = "tcp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
# unrestricted egress
egress {
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]
}
tags = {
Name = "allow_mysql"
}
}
# create a security group to allow sip, rtp and http to the sbc sip+rtp server
resource "aws_security_group" "allow_jambonz_sbc_sip_rtp" {
name = "allow_jambonz_sbc_sip_rtp"
description = "Allow traffic to jambonz sbc sip rtp server"
vpc_id = aws_vpc.jambonz.id
ingress {
description = "ssh"
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
ingress {
description = "sip from everywhere"
from_port = 5060
to_port = 5060
protocol = "udp"
cidr_blocks = ["0.0.0.0/0"]
}
ingress {
description = "sip from everywhere"
from_port = 5060
to_port = 5060
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
# must match rtpengine's --port-min/--port-max range (40000-60000)
ingress {
description = "rtp from everywhere"
from_port = 40000
to_port = 60000
protocol = "udp"
cidr_blocks = ["0.0.0.0/0"]
}
ingress {
description = "rtpengine ng protocol from VPC"
from_port = 22222
to_port = 22222
protocol = "udp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
ingress {
description = "http"
from_port = 3000
to_port = 3000
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
egress {
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]
}
tags = {
# fixed: tag previously read "allow_jambonz_sbc_sip", inconsistent with the
# resource name above
Name = "allow_jambonz_sbc_sip_rtp"
}
}
# create a security group for the feature server: ssh from anywhere; http,
# sip (drachtio and freeswitch) and rtp only from within the VPC
resource "aws_security_group" "allow_jambonz_feature_server" {
name = "allow_jambonz_feature_server"
description = "Allow traffic needed for jambonz feature server"
vpc_id = aws_vpc.jambonz.id
ingress {
description = "ssh"
from_port = 22
to_port = 22
protocol = "tcp"
cidr_blocks = ["0.0.0.0/0"]
}
ingress {
description = "http"
from_port = 3000
to_port = 3000
protocol = "tcp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
ingress {
description = "sip from VPC"
from_port = 5060
to_port = 5060
protocol = "tcp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
ingress {
description = "sip from VPC"
from_port = 5060
to_port = 5060
protocol = "udp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
ingress {
description = "freeswitch sip from VPC"
from_port = 5080
to_port = 5080
protocol = "tcp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
ingress {
description = "freeswitch sip from VPC"
from_port = 5080
to_port = 5080
protocol = "udp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
ingress {
description = "rtp"
from_port = 25000
to_port = 40000
protocol = "udp"
cidr_blocks = [aws_vpc.jambonz.cidr_block]
}
egress {
from_port = 0
to_port = 0
protocol = "-1"
cidr_blocks = ["0.0.0.0/0"]
}
tags = {
Name = "allow_jambonz_feature_server"
}
}

View File

@@ -0,0 +1,142 @@
#!/bin/bash
# Write the pm2 ecosystem file for the SBC SIP+RTP server: api server, call
# router, registrar, and the outbound/inbound SBC apps.
# This file is rendered by terraform's templatefile() (see jambonz.tf), which
# substitutes every ${...} reference below before it runs as EC2 user_data.
cat << EOF > /home/admin/apps/ecosystem.config.js
module.exports = {
apps : [
{
name: 'jambonz-api-server',
cwd: '/home/admin/apps/jambonz-api-server',
script: 'app.js',
out_file: '/home/admin/.pm2/logs/jambonz-api-server.log',
err_file: '/home/admin/.pm2/logs/jambonz-api-server.log',
combine_logs: true,
instance_var: 'INSTANCE_ID',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
JAMBONES_MYSQL_HOST: '${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
JAMBONES_LOGLEVEL: 'info',
JAMBONE_API_VERSION: 'v1',
JAMBONES_CREATE_CALL_URL: 'http://${JAMBONES_FEATURE_SERVER_FOR_API_CALLS}:3000/v1/createCall',
HTTP_PORT: 3000
},
},
{
name: 'sbc-call-router',
cwd: '/home/admin/apps/sbc-call-router',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-call-router.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-call-router.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
HTTP_PORT: 4000,
JAMBONES_INBOUND_ROUTE: '127.0.0.1:4002',
JAMBONES_OUTBOUND_ROUTE: '127.0.0.1:4003',
JAMBONZ_TAGGED_INBOUND: 1,
JAMBONES_NETWORK_CIDR: '${VPC_CIDR}',
}
},
{
name: 'sbc-registrar',
cwd: '/home/admin/apps/sbc-registrar',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-registrar.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-registrar.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
ENABLE_DATADOG_METRICS: 0,
JAMBONES_LOGLEVEL: 'info',
DRACHTIO_HOST: '127.0.0.1',
DRACHTIO_PORT: 9022,
DRACHTIO_SECRET: 'cymru',
JAMBONES_MYSQL_HOST: '${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
}
},
{
name: 'sbc-outbound',
cwd: '/home/admin/apps/sbc-outbound',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-outbound.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-outbound.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
JAMBONES_LOGLEVEL: 'info',
ENABLE_DATADOG_METRICS: 0,
DRACHTIO_HOST: '127.0.0.1',
DRACHTIO_PORT: 9022,
DRACHTIO_SECRET: 'cymru',
JAMBONES_RTPENGINES: '${JAMBONES_RTPENGINE_IPS}',
JAMBONES_MYSQL_HOST: '${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
}
},
{
name: 'sbc-inbound',
cwd: '/home/admin/apps/sbc-inbound',
script: 'app.js',
instance_var: 'INSTANCE_ID',
out_file: '/home/admin/.pm2/logs/jambonz-sbc-inbound.log',
err_file: '/home/admin/.pm2/logs/jambonz-sbc-inbound.log',
exec_mode: 'fork',
instances: 1,
autorestart: true,
watch: false,
max_memory_restart: '1G',
env: {
NODE_ENV: 'production',
JAMBONES_LOGLEVEL: 'info',
ENABLE_DATADOG_METRICS: 0,
DRACHTIO_HOST: '127.0.0.1',
DRACHTIO_PORT: 9022,
DRACHTIO_SECRET: 'cymru',
JAMBONES_FEATURE_SERVERS: '${JAMBONES_FEATURE_SERVER_IPS}',
JAMBONES_RTPENGINES: '${JAMBONES_RTPENGINE_IPS}',
JAMBONES_MYSQL_HOST: '${JAMBONES_MYSQL_HOST}',
JAMBONES_MYSQL_USER: '${JAMBONES_MYSQL_USER}',
JAMBONES_MYSQL_PASSWORD: '${JAMBONES_MYSQL_PASSWORD}',
JAMBONES_MYSQL_DATABASE: 'jambones',
JAMBONES_MYSQL_CONNECTION_LIMIT: 10,
JAMBONES_REDIS_HOST: '${JAMBONES_REDIS_HOST}',
JAMBONES_REDIS_PORT: 6379,
}
}]
};
EOF

View File

@@ -0,0 +1,39 @@
variable "region" {
description = "the aws region in which to create the VPC"
default = "us-east-1"
}
variable "vpc_cidr_block" {
description = "the CIDR block for the whole VPC"
default = "172.31.0.0/16"
}
variable "public_subnets" {
type = map(string)
default = {
"us-east-1a" = "172.31.32.0/24"
"us-east-1b" = "172.31.33.0/24"
}
}
variable "jambonz_sbc_sip_rtp_private_ips" {
type = list(string)
default = ["172.31.32.10"]
}
variable "jambonz_feature_server_private_ips" {
type = list(string)
default = ["172.31.32.100"]
}
variable "ec2_instance_type" {
description = "the EC2 instance type to use for the jambonz server"
default = "t2.medium"
}
variable "key_name" {
description = "name of an aws keypair that you have downloaded and wish to use to access the jambonz instance via ssh"
default = "your-key-here"
}
variable "aws_access_key_id_runtime" {
description = "AWS access key jambonz will use to access AWS Polly TTS"
default = "your-aws-access-key-id"
}
variable "aws_secret_access_key_runtime" {
description = "AWS secret access key jambonz will use to access AWS Polly TTS"
default = "your-aws-secret_access-key"
}

View File

@@ -49,3 +49,8 @@ terraform apply -var='key_name=aws-dhorton-key' \
```
(Note: those are not valid keys of course, just for explanatory purposes).
If you want to destroy the resources created, then:
```
terraform destroy
```

View File

@@ -78,7 +78,7 @@ resource "aws_instance" "jambonz-sbc-sip-server" {
vpc_security_group_ids = [aws_security_group.allow_jambonz_sbc_sip.id]
user_data = templatefile("${path.module}/sbc-sip-server.ecosystem.config.js.tmpl", {
VPC_CIDR = var.vpc_cidr_block
JAMBONES_FEATURE_SERVER_FOR_API_CALLS = var.jambonz_sbc_sip_private_ips[0]
JAMBONES_FEATURE_SERVER_FOR_API_CALLS = var.jambonz_feature_server_private_ips[0]
JAMBONES_FEATURE_SERVER_IPS = join(",", var.jambonz_feature_server_private_ips)
JAMBONES_SBC_SIP_IPS = join(",", var.jambonz_sbc_sip_private_ips)
JAMBONES_RTPENGINE_IPS = join(",", local.rtpengine_hostports)