Merge branch 'v0.5.0' into main

Tyler Stiene 2021-12-13 00:17:30 -05:00
commit c410c49095
23 changed files with 1908 additions and 178 deletions


@ -20,7 +20,13 @@ jobs:
name: Set up Go
uses: actions/setup-go@v2
with:
go-version: 1.15
go-version: 1.17
-
name: go-license install
run: go get github.com/google/go-licenses
-
name: go-license save
run: go-licenses save ./cmd/mumble-discord-bridge --force --save_path="./dist/LICENSES"
-
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v2
@ -29,4 +35,10 @@ jobs:
version: latest
args: release --rm-dist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
name: Upload assets
uses: actions/upload-artifact@v2
with:
name: mdb
path: dist/*

4
.gitignore vendored

@ -5,4 +5,6 @@ dist
*.out
*.test
cert.pem
*.gob
*.gob
docker-compose.yml
mdb-local


@ -2,12 +2,14 @@
# Stage 1
FROM golang:1.16 as builder
FROM golang:1.17 as builder
WORKDIR /go/src/app
COPY . .
RUN curl -sfL https://install.goreleaser.com/github.com/goreleaser/goreleaser.sh | sh
RUN apt update && apt install -y libopus-dev
RUN ./bin/goreleaser build --skip-validate
RUN go install github.com/goreleaser/goreleaser@latest
RUN go install github.com/google/go-licenses@latest
RUN goreleaser build --skip-validate
RUN go-licenses save ./cmd/mumble-discord-bridge --force --save_path="./dist/LICENSES"
# Stage 2
@ -15,6 +17,7 @@ FROM alpine:latest as final
WORKDIR /opt/
RUN apk add opus
RUN mkdir /lib64 && ln -s /lib/libc.musl-x86_64.so.1 /lib64/ld-linux-x86-64.so.2
COPY --from=builder /go/src/app/dist/LICENSES .
COPY --from=builder /go/src/app/dist/mumble-discord-bridge_linux_amd64/mumble-discord-bridge .
# FROM ubuntu:latest as final


@ -2,6 +2,7 @@ GOFILES=$(shell find ./ -type f -name '*.go')
mumble-discord-bridge: $(GOFILES) .goreleaser.yml
goreleaser build --skip-validate --rm-dist
go-licenses save ./cmd/mumble-discord-bridge --force --save_path="./dist/LICENSES"
dev: $(GOFILES) .goreleaser.yml
goreleaser build --skip-validate --rm-dist && sudo ./dist/mumble-discord-bridge_linux_amd64/mumble-discord-bridge


@ -10,48 +10,6 @@ Several configuration variables must be set for the binary to function correctly
All variables can be set using flags or in the environment.
The binary will also attempt to load a .env file located in the working directory.
```bash
Usage of ./mumble-discord-bridge:
-cpuprofile file
write cpu profile to file
-debug-level int
DEBUG_LEVEL, Discord debug level, optional, (default 1) (default 1)
-discord-cid string
DISCORD_CID, discord cid, required
-discord-command string
DISCORD_COMMAND, Discord command string, env alt DISCORD_COMMAND, optional, (defaults mumble-discord) (default "mumble-discord")
-discord-disable-text
DISCORD_DISABLE_TEXT, disable sending direct messages to discord, (default false)
-discord-gid string
DISCORD_GID, discord gid, required
-discord-token string
DISCORD_TOKEN, discord bot token, required
-mode string
MODE, [constant, manual, auto] determine which mode the bridge starts in, (default constant) (default "constant")
-mumble-address string
MUMBLE_ADDRESS, mumble server address, example example.com, required
-mumble-certificate string
MUMBLE_CERTIFICATE, client certificate to use when connecting to the Mumble server
-mumble-channel string
MUMBLE_CHANNEL, mumble channel to start in, using '/' to separate nested channels, optional
-mumble-disable-text
MUMBLE_DISABLE_TEXT, disable sending text to mumble, (default false)
-mumble-insecure bool ("true" or "false")
MUMBLE_INSECURE, mumble insecure, optional, (default false)
-mumble-password string
MUMBLE_PASSWORD, mumble password, optional
-mumble-port int
MUMBLE_PORT, mumble port, (default 64738) (default 64738)
-mumble-username string
MUMBLE_USERNAME, mumble username, (default: discord) (default "Discord")
-nice
NICE, whether the bridge should automatically try to 'nice' itself, (default false)
-to-discord-buffer int
TO_DISCORD_BUFFER, Jitter buffer from Mumble to Discord to absorb timing issues related to network, OS and hardware quality. (Increments of 10ms) (default 50)
-to-mumble-buffer int
TO_MUMBLE_BUFFER, Jitter buffer from Discord to Mumble to absorb timing issues related to network, OS and hardware quality. (Increments of 10ms) (default 50)
```
The bridge can be run with the following modes:
```bash
@ -99,6 +57,8 @@ The bot requires the following permissions:
* Voice Channel Speak
* Voice Channel Use Voice Activity
Permission integer 36768768.
### Finding Discord CID and GID
Discord GID is a unique ID linked to one Discord Server, also called a Guild. CID is similarly a unique ID for a Discord Channel. To find these you need to put Discord into Developer Mode.
@ -187,6 +147,13 @@ A default jitter of 50ms should be adequate for most scenarios.
A warning will be logged if short bursts of audio are seen.
A single warning can be ignored; multiple warnings in short time spans suggest the need for a larger jitter buffer.
## Monitoring the Bridge
The bridge can be started with a Prometheus metrics endpoint enabled.
The example folder contains a docker-compose file that will spawn the bridge, Prometheus, and Grafana, configured to serve a single pre-configured dashboard.
![Mumble Discord Bridge Grafana Dashboard](example/grafana-dashboard.png "Grafana Dashboard")
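For reference, the metrics endpoint added in this commit is served with the stock Prometheus Go client (see `internal/bridge/prom.go` later in this diff). The sketch below shows the same promauto/promhttp pattern in isolation; the metric name is purely illustrative, and 9559 is simply the bridge's default metrics port.

```go
package main

import (
	"log"
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

// exampleStartTime is an illustrative gauge; the bridge registers its own
// mdb_* metrics in internal/bridge/prom.go.
var exampleStartTime = promauto.NewGauge(prometheus.GaugeOpts{
	Name: "example_start_time",
	Help: "The time the example process started",
})

func main() {
	exampleStartTime.SetToCurrentTime()

	// Expose every registered metric on /metrics, mirroring bridge.StartPromServer.
	http.Handle("/metrics", promhttp.Handler())
	log.Fatal(http.ListenAndServe(":9559", nil))
}
```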
## Known Issues
Currently there is an issue opening the Discord voice channel.


@ -50,9 +50,12 @@ func main() {
discordSendBuffer := flag.Int("to-discord-buffer", lookupEnvOrInt("TO_DISCORD_BUFFER", 50), "TO_DISCORD_BUFFER, Jitter buffer from Mumble to Discord to absorb timing issues related to network, OS and hardware quality. (Increments of 10ms)")
discordCommand := flag.String("discord-command", lookupEnvOrString("DISCORD_COMMAND", "mumble-discord"), "DISCORD_COMMAND, Discord command string, env alt DISCORD_COMMAND, optional, (defaults mumble-discord)")
discordDisableText := flag.Bool("discord-disable-text", lookupEnvOrBool("DISCORD_DISABLE_TEXT", false), "DISCORD_DISABLE_TEXT, disable sending direct messages to discord, (default false)")
discordDisableBotStatus := flag.Bool("discord-disable-bot-status", lookupEnvOrBool("DISCORD_DISABLE_BOT_STATUS", false), "DISCORD_DISABLE_BOT_STATUS, disable updating bot status, (default false)")
mode := flag.String("mode", lookupEnvOrString("MODE", "constant"), "MODE, [constant, manual, auto] determine which mode the bridge starts in, (default constant)")
nice := flag.Bool("nice", lookupEnvOrBool("NICE", false), "NICE, whether the bridge should automatically try to 'nice' itself, (default false)")
debug := flag.Int("debug-level", lookupEnvOrInt("DEBUG", 1), "DEBUG_LEVEL, Discord debug level, optional, (default 1)")
promEnable := flag.Bool("prometheus-enable", lookupEnvOrBool("PROMETHEUS_ENABLE", false), "PROMETHEUS_ENABLE, Enable prometheus metrics")
promPort := flag.Int("prometheus-port", lookupEnvOrInt("PROMETHEUS_PORT", 9559), "PROMETHEUS_PORT, Prometheus metrics port, optional, (default 9559)")
cpuprofile := flag.String("cpuprofile", "", "write cpu profile to `file`")
@ -85,6 +88,10 @@ func main() {
}
}
if *promEnable {
go bridge.StartPromServer(*promPort)
}
// Optional CPU Profiling
if *cpuprofile != "" {
f, err := os.Create(*cpuprofile)
@ -129,6 +136,7 @@ func main() {
CID: *discordCID,
DiscordStartStreamingCount: discordStartStreamingCount,
DiscordDisableText: *discordDisableText,
DiscordDisableBotStatus: *discordDisableBotStatus,
Version: version,
},
Connected: false,
@ -136,6 +144,8 @@ func main() {
MumbleUsers: make(map[string]bool),
}
bridge.PromApplicationStartTime.SetToCurrentTime()
// MUMBLE SETUP
Bridge.BridgeConfig.MumbleConfig = gumble.NewConfig()
Bridge.BridgeConfig.MumbleConfig.Username = *mumbleUsername
@ -149,6 +159,7 @@ func main() {
Bridge.BridgeConfig.MumbleConfig.Attach(gumbleutil.Listener{
Connect: Bridge.MumbleListener.MumbleConnect,
UserChange: Bridge.MumbleListener.MumbleUserChange,
// ChannelChange: Bridge.MumbleListener.MumbleChannelChange,
})
// DISCORD SETUP
@ -198,11 +209,6 @@ func main() {
Bridge.Mode = bridge.BridgeModeConstant
Bridge.DiscordChannelID = Bridge.BridgeConfig.CID
go func() {
defer func() {
if r := recover(); r != nil {
fmt.Println("Bridge paniced", r)
}
}()
for {
Bridge.StartBridge()
log.Println("Bridge died")


@ -1,3 +1,6 @@
# This is a basic docker-compose file to run an instance of Mumble-Discord-Bridge
# docker-compose -f ./docker-compose.yml up -d
version: "3"
services:


@ -0,0 +1,69 @@
# This docker compose file contains an example of starting Mumble-Discord-Bridge with Prometheus and Grafana
# The monitoring folder is needed to provide the necessary default configs for Prometheus and Grafana
# Prometheus port 9090
# Grafana port 3030
version: '3.8'
volumes:
prometheus_data: {}
grafana_data: {}
services:
mumble-discord-bridge:
image: stieneee/mumble-discord-bridge
restart: unless-stopped
networks:
- mdb
environment:
- MUMBLE_ADDRESS=example.com
- MUMBLE_USERNAME=discord-bridge
- MUMBLE_PASSWORD=password
- DISCORD_TOKEN=token
- DISCORD_GID=gid
- DISCORD_CID=cid
prometheus:
image: prom/prometheus:latest
container_name: prometheus
volumes:
- ./prometheus:/etc/prometheus
- prometheus_data:/prometheus
command:
- '--config.file=/etc/prometheus/prometheus.yml'
- '--storage.tsdb.path=/prometheus'
- '--web.console.libraries=/etc/prometheus/console_libraries'
- '--web.console.templates=/etc/prometheus/consoles'
- '--storage.tsdb.retention.time=200h'
- '--web.enable-lifecycle'
restart: unless-stopped
expose:
- 9090
ports:
- 9090:9090
depends_on:
- mumble-discord-bridge
grafana:
image: grafana/grafana:latest
container_name: grafana
volumes:
# - grafana_data:/var/lib/grafana
- ./grafana/provisioning/dashboards:/etc/grafana/provisioning/dashboards
- ./grafana/provisioning/datasources:/etc/grafana/provisioning/datasources
environment:
# - GF_SECURITY_ADMIN_USER=${ADMIN_USER:-admin}
# - GF_SECURITY_ADMIN_PASSWORD=${ADMIN_PASSWORD:-admin}
- GF_USERS_ALLOW_SIGN_UP=false
- GF_AUTH_ANONYMOUS_ENABLED=true
- GF_AUTH_ANONYMOUS_ORG_NAME=Main Org.
- GF_AUTH_ANONYMOUS_ORG_ROLE=Editor
restart: unless-stopped
expose:
- 3000
ports:
- 3030:3000
depends_on:
- prometheus

Binary file not shown.

Size: 156 KiB


@ -0,0 +1,24 @@
apiVersion: 1
providers:
# <string> a unique provider name. Required
- name: 'MDB'
# <int> Org id. Default to 1
orgId: 1
# <string> name of the dashboard folder.
folder: ''
# <string> folder UID. will be automatically generated if not specified
folderUid: ''
# <string> provider type. Default to 'file'
type: file
# <bool> disable dashboard deletion
disableDeletion: false
# <int> how often Grafana will scan for changed dashboards
updateIntervalSeconds: 30
# <bool> allow updating provisioned dashboards from the UI
allowUiUpdates: true
options:
# <string, required> path to dashboard files on disk. Required when using the 'file' type
path: /etc/grafana/provisioning/dashboards
# <bool> use folder names from filesystem to create folders in Grafana
foldersFromFilesStructure: false

File diff suppressed because it is too large


@ -0,0 +1,11 @@
apiVersion: 1
datasources:
- name: Prometheus
type: prometheus
access: proxy
orgId: 1
url: http://prometheus:9090
basicAuth: false
isDefault: true
editable: true


@ -0,0 +1,12 @@
global:
scrape_interval: 15s
evaluation_interval: 15s
# A scrape configuration containing exactly one endpoint to scrape.
scrape_configs:
- job_name: 'mdb'
scrape_interval: 1s
static_configs:
- targets: [
'mumble-discord-bridge:9559',
]

2
go.mod

@ -5,9 +5,9 @@ go 1.15
require (
github.com/bwmarrin/discordgo v0.23.3-0.20210512035133-7d7206b01bb5
github.com/joho/godotenv v1.3.0
github.com/prometheus/client_golang v1.11.0
github.com/stieneee/gopus v0.0.0-20210424193312-6d10f6090335
github.com/stieneee/gumble v0.0.0-20210424210604-732f48b5e0de
github.com/stieneee/tickerct v0.0.0-20210420020607-d1b092aa40e9
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad // indirect
golang.org/x/sys v0.0.0-20210108172913-0df2131ae363 // indirect
)

133
go.sum

@ -1,38 +1,159 @@
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bwmarrin/discordgo v0.23.3-0.20210512035133-7d7206b01bb5 h1:VtiZMSjY2N6XpM1luSchBVX76QURpS0HA7BffVuHOCo=
github.com/bwmarrin/discordgo v0.23.3-0.20210512035133-7d7206b01bb5/go.mod h1:OMKxbTmkKofBjBi4/yidO3ItxbJ6PUfEUkjchM4En8c=
github.com/dchote/go-openal v0.0.0-20171116030048-f4a9a141d372 h1:tz3KnXWtRZR0RWOfcMNOw+HHezWLQa7vfSOWTtKjchI=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dchote/go-openal v0.0.0-20171116030048-f4a9a141d372/go.mod h1:74z+CYu2/mx4N+mcIS/rsvfAxBPBV9uv8zRAnwyFkdI=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.11.0 h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ=
github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.26.0 h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/stieneee/gopus v0.0.0-20210424193312-6d10f6090335 h1:yzwz6AqGKysli5du4CrQ48BMGUCSkrl7V7Kbo9VaG8w=
github.com/stieneee/gopus v0.0.0-20210424193312-6d10f6090335/go.mod h1:tAKYr3fSBJGold7c9DMPlhupn9oy8hTgl3cZ0hoyRQs=
github.com/stieneee/gumble v0.0.0-20210424210604-732f48b5e0de h1:4dWOeXRnba4jHVa3KuWf7i/GOIAlBMR3euVTUXOey2I=
github.com/stieneee/gumble v0.0.0-20210424210604-732f48b5e0de/go.mod h1:hVIsmrlrudlx2HJbsDkIZI4crkv6NHSau0ldEWbQI/Y=
github.com/stieneee/tickerct v0.0.0-20210420020607-d1b092aa40e9 h1:0l2H6Oj6JHMmkqm9xaBMQA5MOGhPT+Nn/thlTUcD9Iw=
github.com/stieneee/tickerct v0.0.0-20210420020607-d1b092aa40e9/go.mod h1:54+oZlabriEpT52rPAjAeEWUFgYqv325LrS3hNxHGFE=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad h1:DN0cp81fZ3njFcrLCytUHRSUkqBjfTo4Tx9RJTWs0EY=
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3 h1:0GoQqolDA55aaLxZyTzK/Y2ePZzZTUrRacwib7cNsYQ=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210108172913-0df2131ae363 h1:wHn06sgWHMO1VsQ8F+KzDJx/JzqfsNLnc+oEi07qD7s=
golang.org/x/sys v0.0.0-20210108172913-0df2131ae363/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221 h1:/ZHdbVpdR/jk3g30/d4yUL0JU9kksj8+F/bnQUVLGDM=
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40 h1:JWgyZ1qgdTaF3N3oxC+MdTV7qvEEgHo3otj+HB5CM7Q=
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=


@ -42,6 +42,7 @@ type BridgeConfig struct {
CID string
DiscordStartStreamingCount int
DiscordDisableText bool
DiscordDisableBotStatus bool
Version string
}
@ -119,6 +120,9 @@ func (b *BridgeState) StartBridge() {
var err error
promBridgeStarts.Inc()
promBridgeStartTime.SetToCurrentTime()
// DISCORD Connect Voice
log.Println("Attempting to join Discord voice channel")
if b.DiscordChannelID == "" {
@ -138,7 +142,7 @@ func (b *BridgeState) StartBridge() {
// MUMBLE Connect
b.MumbleStream = &MumbleDuplex{}
b.MumbleStream = NewMumbleDuplex()
det := b.BridgeConfig.MumbleConfig.AudioListeners.Attach(b.MumbleStream)
defer det.Detach()
@ -177,26 +181,40 @@ func (b *BridgeState) StartBridge() {
defer close(toDiscord)
defer close(toMumble)
// From Discord
b.DiscordStream = NewDiscordDuplex(b)
// Start Passing Between
// From Mumble
go b.MumbleStream.fromMumbleMixer(ctx, &wg, toDiscord)
// From Discord
b.DiscordStream = &DiscordDuplex{
Bridge: b,
fromDiscordMap: make(map[uint32]fromDiscord),
}
go b.DiscordStream.discordReceivePCM(ctx, &wg, cancel)
go b.DiscordStream.fromDiscordMixer(ctx, &wg, toMumble)
// To Discord
go b.DiscordStream.discordSendPCM(ctx, &wg, cancel, toDiscord)
// Monitor Mumble
wg.Add(1)
go func() {
defer wg.Done()
b.MumbleStream.fromMumbleMixer(ctx, cancel, toDiscord)
}()
wg.Add(1)
go func() {
defer wg.Done()
b.DiscordStream.discordReceivePCM(ctx, cancel)
}()
wg.Add(1)
go func() {
defer wg.Done()
b.DiscordStream.fromDiscordMixer(ctx, toMumble)
}()
// To Discord
wg.Add(1)
go func() {
defer wg.Done()
b.DiscordStream.discordSendPCM(ctx, cancel, toDiscord)
}()
// Monitor
wg.Add(1)
go func() {
defer wg.Done()
ticker := time.NewTicker(500 * time.Millisecond)
for {
select {
@ -210,7 +228,6 @@ func (b *BridgeState) StartBridge() {
cancel()
}
case <-ctx.Done():
wg.Done()
return
}
}
@ -252,6 +269,9 @@ func (b *BridgeState) DiscordStatusUpdate() {
log.Printf("error pinging mumble server %v\n", err)
b.DiscordSession.UpdateListeningStatus("an error pinging mumble")
} else {
promMumblePing.Set(float64(resp.Ping.Milliseconds()))
b.MumbleUsersMutex.Lock()
b.BridgeMutex.Lock()
b.MumbleUserCount = resp.ConnectedUsers
@ -269,8 +289,16 @@ func (b *BridgeState) DiscordStatusUpdate() {
}
b.BridgeMutex.Unlock()
b.MumbleUsersMutex.Unlock()
b.DiscordSession.UpdateListeningStatus(status)
if !b.BridgeConfig.DiscordDisableBotStatus {
b.DiscordSession.UpdateListeningStatus(status)
}
}
discordHeartBeat := b.DiscordSession.LastHeartbeatAck.Sub(b.DiscordSession.LastHeartbeatSent).Milliseconds()
if discordHeartBeat > 0 {
promDiscordHeartBeat.Set(float64(discordHeartBeat))
}
}
}


@ -227,9 +227,13 @@ func (l *DiscordListener) VoiceUpdate(s *discordgo.Session, event *discordgo.Voi
l.Bridge.MumbleClient.Self.Channel.Send(fmt.Sprintf("%v has left Discord channel\n", l.Bridge.DiscordUsers[id].username), false)
})
}
l.Bridge.BridgeMutex.Unlock()
delete(l.Bridge.DiscordUsers, id)
l.Bridge.BridgeMutex.Unlock()
}
}
l.Bridge.BridgeMutex.Lock()
promDiscordUsers.Set(float64(len(l.Bridge.DiscordUsers)))
l.Bridge.BridgeMutex.Unlock()
}
}


@ -26,8 +26,19 @@ type fromDiscord struct {
type DiscordDuplex struct {
Bridge *BridgeState
discordMutex sync.Mutex
fromDiscordMap map[uint32]fromDiscord
discordMutex sync.Mutex
fromDiscordMap map[uint32]fromDiscord
discordSendSleepTick sleepct.SleepCT
discordReceiveSleepTick sleepct.SleepCT
}
func NewDiscordDuplex(b *BridgeState) *DiscordDuplex {
return &DiscordDuplex{
Bridge: b,
fromDiscordMap: make(map[uint32]fromDiscord),
discordSendSleepTick: sleepct.SleepCT{},
discordReceiveSleepTick: sleepct.SleepCT{},
}
}
// OnError gets called by dgvoice when an error is encountered.
@ -43,8 +54,8 @@ var OnError = func(str string, err error) {
}
// SendPCM will receive on the provided channel encode
// received PCM data into Opus then send that to Discordgo
func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, wg *sync.WaitGroup, cancel context.CancelFunc, pcm <-chan []int16) {
// received PCM data with Opus then send that to Discordgo
func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, cancel context.CancelFunc, pcm <-chan []int16) {
const channels int = 1
const frameRate int = 48000 // audio sampling rate
const frameSize int = 960 // uint16 size of each audio frame
@ -61,14 +72,27 @@ func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, wg *sync.WaitGroup,
// Generate Opus Silence Frame
opusSilence := []byte{0xf8, 0xff, 0xfe}
sleepTick := sleepct.SleepCT{}
sleepTick.Start(20 * time.Millisecond)
dd.discordSendSleepTick.Start(20 * time.Millisecond)
lastReady := true
var readyTimeout *time.Timer
var speakingStart time.Time
wg.Add(1)
// Spy on the PCM channel to notify
// TODO determine a method to notify a paused sleepct
// pcm := make(chan []int16, 10)
// go func() {
// for {
// t, ok := <-pcmIn
// if !ok {
// close(pcm)
// return
// } else {
// dd.discordSendSleepTick.Notify()
// pcm <- t
// }
// }
// }()
internalSend := func(opus []byte) {
dd.Bridge.DiscordVoice.RWMutex.RLock()
@ -86,7 +110,12 @@ func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, wg *sync.WaitGroup,
lastReady = true
readyTimeout.Stop()
} else {
dd.Bridge.DiscordVoice.OpusSend <- opus
select {
case dd.Bridge.DiscordVoice.OpusSend <- opus:
case <-ctx.Done():
}
promDiscordSentPackets.Inc()
}
dd.Bridge.DiscordVoice.RWMutex.RUnlock()
}
@ -94,12 +123,14 @@ func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, wg *sync.WaitGroup,
for {
select {
case <-ctx.Done():
wg.Done()
log.Println("Stopping Discord send PCM")
return
default:
}
sleepTick.SleepNextTarget()
// if we are not streaming try to pause
// promTimerDiscordSend.Observe(float64(dd.discordSendSleepTick.SleepNextTarget(ctx, !streaming)))
promTimerDiscordSend.Observe(float64(dd.discordSendSleepTick.SleepNextTarget(ctx, false)))
if (len(pcm) > 1 && streaming) || (len(pcm) > dd.Bridge.BridgeConfig.DiscordStartStreamingCount && !streaming) {
if !streaming {
@ -126,7 +157,7 @@ func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, wg *sync.WaitGroup,
// It is possible that a short speaking cycle is the result of a short input to mumble (not a problem), i.e. a quick tap of the push to talk button.
// Or when timing delays are introduced via network, hardware or kernel delays (Problem).
// The problem delays result in choppy or stuttering sounds, especially when the silence frames are introduced into the opus frames below.
// Multiple short cycle delays can result in a Discrod rate limiter being trigger due to of multiple JSON speaking/not-speaking state changes
// Multiple short cycle delays can result in a discord rate limiter being triggered due to multiple JSON speaking/not-speaking state changes
if time.Since(speakingStart).Milliseconds() < 50 {
log.Println("Warning: Short Mumble to Discord speaking cycle. Consider increaseing the size of the to Discord jitter buffer.")
}
@ -135,7 +166,9 @@ func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, wg *sync.WaitGroup,
// We want to do this after alerting the user of possible short speaking cycles
for i := 0; i < 5; i++ {
internalSend(opusSilence)
sleepTick.SleepNextTarget()
// promTimerDiscordSend.Observe(float64(dd.discordSendSleepTick.SleepNextTarget(ctx, true)))
promTimerDiscordSend.Observe(float64(dd.discordSendSleepTick.SleepNextTarget(ctx, false)))
}
dd.Bridge.DiscordVoice.Speaking(false)
@ -147,7 +180,7 @@ func (dd *DiscordDuplex) discordSendPCM(ctx context.Context, wg *sync.WaitGroup,
// ReceivePCM will receive on the Discordgo OpusRecv channel and decode
// the opus audio into PCM then send it on the provided channel.
func (dd *DiscordDuplex) discordReceivePCM(ctx context.Context, wg *sync.WaitGroup, cancel context.CancelFunc) {
func (dd *DiscordDuplex) discordReceivePCM(ctx context.Context, cancel context.CancelFunc) {
var err error
lastReady := true
@ -158,8 +191,6 @@ func (dd *DiscordDuplex) discordReceivePCM(ctx context.Context, wg *sync.WaitGro
zeros[i] = 0
}
wg.Add(1)
for {
dd.Bridge.DiscordVoice.RWMutex.RLock()
if !dd.Bridge.DiscordVoice.Ready || dd.Bridge.DiscordVoice.OpusRecv == nil {
@ -184,7 +215,7 @@ func (dd *DiscordDuplex) discordReceivePCM(ctx context.Context, wg *sync.WaitGro
select {
case <-ctx.Done():
wg.Done()
log.Println("Stopping Discord receive PCM")
return
case p, ok = <-dd.Bridge.DiscordVoice.OpusRecv:
}
@ -234,7 +265,7 @@ func (dd *DiscordDuplex) discordReceivePCM(ctx context.Context, wg *sync.WaitGro
dd.fromDiscordMap[p.SSRC] = s
dd.discordMutex.Unlock()
p.PCM, err = s.decoder.Decode(p.Opus, deltaT*2, false)
p.PCM, err = s.decoder.Decode(p.Opus, deltaT, false)
if err != nil {
OnError("Error decoding opus data", err)
continue
@ -242,6 +273,8 @@ func (dd *DiscordDuplex) discordReceivePCM(ctx context.Context, wg *sync.WaitGro
// fmt.Println(p.SSRC, p.Type, deltaT, p.Sequence, p.Sequence-s.lastSequence, oldReceiving, s.streaming, len(p.Opus), len(p.PCM))
promDiscordReceivedPackets.Inc()
// Push data into pcm channel in 10ms chunks of mono pcm data
dd.discordMutex.Lock()
for l := 0; l < len(p.PCM); l = l + 480 {
@ -257,37 +290,41 @@ func (dd *DiscordDuplex) discordReceivePCM(ctx context.Context, wg *sync.WaitGro
}
}
dd.discordMutex.Unlock()
dd.discordReceiveSleepTick.Notify()
}
}
func (dd *DiscordDuplex) fromDiscordMixer(ctx context.Context, wg *sync.WaitGroup, toMumble chan<- gumble.AudioBuffer) {
func (dd *DiscordDuplex) fromDiscordMixer(ctx context.Context, toMumble chan<- gumble.AudioBuffer) {
mumbleSilence := gumble.AudioBuffer{}
for i := 3; i < 480; i++ {
mumbleSilence = append(mumbleSilence, 0x00)
}
var speakingStart time.Time
sleepTick := sleepct.SleepCT{}
sleepTick.Start(10 * time.Millisecond)
dd.discordReceiveSleepTick.Start(10 * time.Millisecond)
sendAudio := false
toMumbleStreaming := false
wg.Add(1)
for {
select {
case <-ctx.Done():
wg.Done()
log.Println("Stopping from Discord mixer")
return
default:
}
sleepTick.SleepNextTarget()
// if we didn't send audio, try to pause
// promTimerDiscordMixer.Observe(float64(dd.discordReceiveSleepTick.SleepNextTarget(ctx, !sendAudio)))
// TODO Additional pause testing
promTimerDiscordMixer.Observe(float64(dd.discordReceiveSleepTick.SleepNextTarget(ctx, false)))
dd.discordMutex.Lock()
sendAudio = false
internalMixerArr := make([][]int16, 0)
streamingCount := 0
// Work through each channel
for i := range dd.fromDiscordMap {
@ -306,6 +343,7 @@ func (dd *DiscordDuplex) fromDiscordMixer(ctx context.Context, wg *sync.WaitGrou
dd.fromDiscordMap[i] = x
}
streamingCount++
x1 := (<-dd.fromDiscordMap[i].pcm)
internalMixerArr = append(internalMixerArr, x1)
} else {
@ -318,6 +356,9 @@ func (dd *DiscordDuplex) fromDiscordMixer(ctx context.Context, wg *sync.WaitGrou
}
}
promDiscordArraySize.Set(float64(len(dd.fromDiscordMap)))
promDiscordStreaming.Set(float64(streamingCount))
dd.discordMutex.Unlock()
mumbleTimeoutSend := func(outBuf []int16) {
@ -329,8 +370,10 @@ func (dd *DiscordDuplex) fromDiscordMixer(ctx context.Context, wg *sync.WaitGrou
select {
case toMumble <- outBuf:
promSentMumblePackets.Inc()
case <-timeout:
log.Println("To Mumble timeout. Dropping packet")
promToMumbleDropped.Inc()
}
}
@ -354,7 +397,7 @@ func (dd *DiscordDuplex) fromDiscordMixer(ctx context.Context, wg *sync.WaitGrou
for i := 0; i < 5; i++ {
mumbleTimeoutSend(mumbleSilence)
sleepTick.SleepNextTarget()
promTimerDiscordMixer.Observe(float64(dd.discordReceiveSleepTick.SleepNextTarget(ctx, false)))
}
toMumbleStreaming = false


@ -1,8 +1,10 @@
package bridge
import (
"fmt"
"log"
"strings"
"time"
"github.com/stieneee/gumble/gumble"
)
@ -12,28 +14,44 @@ type MumbleListener struct {
Bridge *BridgeState
}
func (l *MumbleListener) updateUsers() {
l.Bridge.MumbleUsersMutex.Lock()
l.Bridge.MumbleUsers = make(map[string]bool)
for _, user := range l.Bridge.MumbleClient.Self.Channel.Users {
//note, this might be too slow for really really big channels?
//event listeners block while processing
//also probably bad to rebuild the set every user change.
if user.Name != l.Bridge.MumbleClient.Self.Name {
l.Bridge.MumbleUsers[user.Name] = true
}
}
promMumbleUsers.Set(float64(len(l.Bridge.MumbleUsers)))
l.Bridge.MumbleUsersMutex.Unlock()
}
func (l *MumbleListener) MumbleConnect(e *gumble.ConnectEvent) {
//join specified channel
startingChannel := e.Client.Channels.Find(l.Bridge.BridgeConfig.MumbleChannel...)
if startingChannel != nil {
e.Client.Self.Move(startingChannel)
}
// l.updateUsers() // patch below
// This is an ugly patch; Mumble client state is slow to update
time.AfterFunc(5*time.Second, func() {
defer func() {
if r := recover(); r != nil {
fmt.Printf("Failed to mumble user list %v \n", r)
}
}()
l.updateUsers()
})
}
func (l *MumbleListener) MumbleUserChange(e *gumble.UserChangeEvent) {
l.Bridge.MumbleUsersMutex.Lock()
if e.Type.Has(gumble.UserChangeConnected) || e.Type.Has(gumble.UserChangeChannel) || e.Type.Has(gumble.UserChangeDisconnected) {
l.Bridge.MumbleUsers = make(map[string]bool)
for _, user := range l.Bridge.MumbleClient.Self.Channel.Users {
//note, this might be too slow for really really big channels?
//event listeners block while processing
//also probably bad to rebuild the set every user change.
if user.Name != l.Bridge.MumbleClient.Self.Name {
l.Bridge.MumbleUsers[user.Name] = true
}
}
}
l.Bridge.MumbleUsersMutex.Unlock()
l.updateUsers()
if e.Type.Has(gumble.UserChangeConnected) {


@ -3,6 +3,7 @@ package bridge
import (
"context"
"log"
"strconv"
"sync"
"time"
@ -11,23 +12,34 @@ import (
"github.com/stieneee/mumble-discord-bridge/pkg/sleepct"
)
var mutex sync.Mutex
var fromMumbleArr []chan gumble.AudioBuffer
var mumbleStreamingArr []bool
// MumbleDuplex - listener and outgoing
type MumbleDuplex struct {
mutex sync.Mutex
fromMumbleArr []chan gumble.AudioBuffer
mumbleStreamingArr []bool
mumbleSleepTick sleepct.SleepCT
}
// MumbleDuplex - listenera and outgoing
type MumbleDuplex struct{}
func NewMumbleDuplex() *MumbleDuplex {
return &MumbleDuplex{
fromMumbleArr: make([]chan gumble.AudioBuffer, 0),
mumbleStreamingArr: make([]bool, 0),
mumbleSleepTick: sleepct.SleepCT{},
}
}
// OnAudioStream - Spawn routines to handle incoming packets
func (m MumbleDuplex) OnAudioStream(e *gumble.AudioStreamEvent) {
func (m *MumbleDuplex) OnAudioStream(e *gumble.AudioStreamEvent) {
// hold a reference to the channel in the closure
localMumbleArray := make(chan gumble.AudioBuffer, 100)
streamChan := make(chan gumble.AudioBuffer, 100)
mutex.Lock()
fromMumbleArr = append(fromMumbleArr, localMumbleArray)
mumbleStreamingArr = append(mumbleStreamingArr, false)
mutex.Unlock()
m.mutex.Lock()
m.fromMumbleArr = append(m.fromMumbleArr, streamChan)
m.mumbleStreamingArr = append(m.mumbleStreamingArr, false)
m.mutex.Unlock()
promMumbleArraySize.Set(float64(len(m.fromMumbleArr)))
go func() {
name := e.User.Name
@ -37,57 +49,62 @@ func (m MumbleDuplex) OnAudioStream(e *gumble.AudioStreamEvent) {
// 480 per 10ms
for i := 0; i < len(p.AudioBuffer)/480; i++ {
localMumbleArray <- p.AudioBuffer[480*i : 480*(i+1)]
streamChan <- p.AudioBuffer[480*i : 480*(i+1)]
}
promReceivedMumblePackets.Inc()
m.mumbleSleepTick.Notify()
}
log.Println("Mumble audio stream ended", name)
}()
}
func (m MumbleDuplex) fromMumbleMixer(ctx context.Context, wg *sync.WaitGroup, toDiscord chan []int16) {
sleepTick := sleepct.SleepCT{}
sleepTick.Start(10 * time.Millisecond)
func (m *MumbleDuplex) fromMumbleMixer(ctx context.Context, cancel context.CancelFunc, toDiscord chan []int16) {
m.mumbleSleepTick.Start(10 * time.Millisecond)
sendAudio := false
bufferWarning := false
wg.Add(1)
droppingPackets := false
droppingPacketCount := 0
for {
select {
case <-ctx.Done():
wg.Done()
log.Println("Stopping From Mumble Mixer")
return
default:
}
sleepTick.SleepNextTarget()
promTimerMumbleMixer.Observe(float64(m.mumbleSleepTick.SleepNextTarget(ctx, false)))
mutex.Lock()
m.mutex.Lock()
sendAudio = false
internalMixerArr := make([]gumble.AudioBuffer, 0)
streamingCount := 0
// Work through each channel
for i := 0; i < len(fromMumbleArr); i++ {
if len(fromMumbleArr[i]) > 0 {
for i := 0; i < len(m.fromMumbleArr); i++ {
if len(m.fromMumbleArr[i]) > 0 {
sendAudio = true
if !mumbleStreamingArr[i] {
mumbleStreamingArr[i] = true
if !m.mumbleStreamingArr[i] {
m.mumbleStreamingArr[i] = true
streamingCount++
// log.Println("Mumble starting", i)
}
x1 := (<-fromMumbleArr[i])
x1 := (<-m.fromMumbleArr[i])
internalMixerArr = append(internalMixerArr, x1)
} else {
if mumbleStreamingArr[i] {
mumbleStreamingArr[i] = false
if m.mumbleStreamingArr[i] {
m.mumbleStreamingArr[i] = false
// log.Println("Mumble stopping", i)
}
}
}
mutex.Unlock()
m.mutex.Unlock()
promMumbleStreaming.Set(float64(streamingCount))
if sendAudio {
@ -99,22 +116,27 @@ func (m MumbleDuplex) fromMumbleMixer(ctx context.Context, wg *sync.WaitGroup, t
}
}
if len(toDiscord) > 20 {
if !bufferWarning {
log.Println("Warning: toDiscord buffer size")
bufferWarning = true
}
} else {
if bufferWarning {
log.Println("Resolved: toDiscord buffer size")
bufferWarning = false
}
}
promToDiscordBufferSize.Set(float64(len(toDiscord)))
select {
case toDiscord <- outBuf:
{
if droppingPackets {
log.Println("Discord buffer ok, total packets dropped " + strconv.Itoa(droppingPacketCount))
droppingPackets = false
}
}
default:
log.Println("Error: toDiscord buffer full. Dropping packet")
if !droppingPackets {
log.Println("Error: toDiscord buffer full. Dropping packets")
droppingPackets = true
droppingPacketCount = 0
}
droppingPacketCount++
promToDiscordDropped.Inc()
if droppingPacketCount > 250 {
log.Println("Discord Timeout")
cancel()
}
}
}
}

141
internal/bridge/prom.go Normal file

@ -0,0 +1,141 @@
package bridge
import (
"log"
"net/http"
"strconv"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
var (
// Bridge General
PromApplicationStartTime = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_bridge_start_time",
Help: "The time the application started",
})
promBridgeStarts = promauto.NewCounter(prometheus.CounterOpts{
Name: "mdb_bridge_starts_count",
Help: "The number of times the bridge start routine has been called",
})
promBridgeStartTime = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_bridge_starts_time",
Help: "The time the current bridge instance started",
})
// MUMBLE
promMumblePing = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_mumble_ping",
Help: "Mumble ping",
})
promMumbleUsers = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_mumble_users_gauge",
Help: "The number of connected Mumble users",
})
promReceivedMumblePackets = promauto.NewCounter(prometheus.CounterOpts{
Name: "mdb_mumble_received_count",
Help: "The count of Mumble audio packets received",
})
promSentMumblePackets = promauto.NewCounter(prometheus.CounterOpts{
Name: "mdb_mumble_sent_count",
Help: "The count of audio packets sent to mumble",
})
// promToMumbleBufferSize = promauto.NewGauge(prometheus.GaugeOpts{
// Name: "mdb_to_mumble_buffer_gauge",
// Help: "",
// })
promToMumbleDropped = promauto.NewCounter(prometheus.CounterOpts{
Name: "mdb_to_mumble_dropped",
Help: "The number of packets timeouts to mumble",
})
promMumbleArraySize = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_to_mumble_array_size_gauge",
Help: "The array size of mumble streams",
})
promMumbleStreaming = promauto.NewGauge(prometheus.GaugeOpts{ //SUMMARY?
Name: "mdb_mumble_streaming_gauge",
Help: "The number of active audio streams streaming audio from mumble",
})
// DISCORD
// TODO Discord Ping
promDiscordHeartBeat = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_discord_latency",
Help: "Discord heartbeat latency",
})
promDiscordUsers = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_discord_users_gauge",
Help: "The number of Connected Discord users",
})
promDiscordReceivedPackets = promauto.NewCounter(prometheus.CounterOpts{
Name: "mdb_discord_received_count",
Help: "The number of received packets from Discord",
})
promDiscordSentPackets = promauto.NewCounter(prometheus.CounterOpts{
Name: "mdb_discord_sent_count",
Help: "The number of packets sent to Discord",
})
promToDiscordBufferSize = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_discord_buffer_gauge",
Help: "The buffer size for packets to Discord",
})
promToDiscordDropped = promauto.NewCounter(prometheus.CounterOpts{
Name: "mdb_to_discord_dropped",
Help: "The count of packets dropped to discord",
})
promDiscordArraySize = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_discord_array_size_gauge",
Help: "The discord receiving array size",
})
promDiscordStreaming = promauto.NewGauge(prometheus.GaugeOpts{
Name: "mdb_discord_streaming_gauge",
Help: "The number of active audio streams streaming from discord",
})
// Sleep Timer Performance
promTimerDiscordSend = promauto.NewHistogram(prometheus.HistogramOpts{
Name: "mdb_timer_discord_send",
Help: "Timer performance for Discord send",
Buckets: []float64{1000, 2000, 5000, 10000, 20000},
})
promTimerDiscordMixer = promauto.NewHistogram(prometheus.HistogramOpts{
Name: "mdb_timer_discord_mixer",
Help: "Timer performance for the Discord mixer",
Buckets: []float64{1000, 2000, 5000, 10000, 20000},
})
promTimerMumbleMixer = promauto.NewHistogram(prometheus.HistogramOpts{
Name: "mdb_timer_mumble_mixer",
Help: "Timer performance for the Mumble mixer",
Buckets: []float64{1000, 2000, 5000, 10000, 20000},
})
)
func StartPromServer(port int) {
log.Println("Starting Metrics Server")
http.Handle("/metrics", promhttp.Handler())
http.ListenAndServe(":"+strconv.Itoa(port), nil)
}


@ -1,20 +1,25 @@
package sleepct
import (
"context"
"fmt"
"sync"
"time"
)
// SleepCT - Sleep constant time step crates a sleep based ticker
// designed maintain a sleep/tick interval
// SleepCT - Sleep constant time step creates a sleep based ticker.
// designed to maintain a consistent sleep/tick interval.
// The sleeper can be paused waiting to be signaled from another go routine.
// This allows for the pausing of loops that do not have work to complete
type SleepCT struct {
sync.Mutex
d time.Duration // duration
t time.Time // last time target
d time.Duration // desired duration between targets
t time.Time // last time target
resume chan bool
wake time.Time // last wake time
drift int64 // last wake drift microseconds
}
func (s *SleepCT) Start(d time.Duration) {
s.resume = make(chan bool, 2)
if s.t.IsZero() {
s.d = d
s.t = time.Now()
@ -23,28 +28,65 @@ func (s *SleepCT) Start(d time.Duration) {
}
}
func (s *SleepCT) SleepNextTarget() {
s.Lock()
// Sleep to the next target duration.
// If pause is set to true, it will sleep the duration and wait to be notified.
// The notification channel will be cleared when the thread wakes.
// SleepNextTarget should not be called more than once concurrently.
func (s *SleepCT) SleepNextTarget(ctx context.Context, pause bool) int64 {
now := time.Now()
var last time.Time
// if target is zero safety net
if s.t.IsZero() {
fmt.Println("SleepCT reset")
last = now.Add(-s.d)
} else {
last = s.t
s.t = now.Add(-s.d)
}
// Next Target
s.t = last.Add(s.d)
// Sleep to Next Target
s.t = s.t.Add(s.d)
d := s.t.Sub(now)
// Compute the desired sleep time to reach the target
d := time.Until(s.t)
// Sleep
time.Sleep(d)
// delta := now.Sub(s.t)
// fmt.Println("delta", delta, d, time.Since(s.t))
// record the wake time
s.wake = time.Now()
s.drift = s.wake.Sub(s.t).Microseconds()
s.Unlock()
// fmt.Println(s.t.UnixMilli(), d.Milliseconds(), wake.UnixMilli(), drift, pause, len(s.resume))
// external pause control
if pause {
// don't pause if the notification channel has something
if len(s.resume) == 0 {
// fmt.Println("pause")
select {
case <-s.resume:
case <-ctx.Done():
// fmt.Println("sleepct ctx exit")
}
// if we did pause set the last sleep target to now
s.t = time.Now()
}
}
// Drain the resume channel
select {
case <-s.resume:
default:
}
// return the drift for monitoring purposes
return s.drift
}
// Notify attempts to resume a paused sleeper.
// It is safe to call Notify from other goroutines and as often as desired.
func (s *SleepCT) Notify() {
select {
case s.resume <- true:
default:
}
}
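A short usage sketch of the reworked sleepct API, based only on the signatures above (Start, SleepNextTarget, Notify): a producer notifies the sleeper whenever it queues work, and the consumer wakes on a constant 10 ms cadence, pausing when its queue is empty. The work channel, intervals, and timeout are illustrative and not taken from the bridge.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/stieneee/mumble-discord-bridge/pkg/sleepct"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 200*time.Millisecond)
	defer cancel()

	work := make(chan int, 8) // illustrative work queue

	var tick sleepct.SleepCT
	tick.Start(10 * time.Millisecond)

	// Producer: queue work and wake the sleeper, the way the mixers call Notify
	// when audio packets arrive.
	go func() {
		for i := 0; ctx.Err() == nil; i++ {
			work <- i
			tick.Notify()
			time.Sleep(25 * time.Millisecond)
		}
	}()

	// Consumer: hold a constant 10ms tick, pausing whenever there is no work.
	for ctx.Err() == nil {
		drift := tick.SleepNextTarget(ctx, len(work) == 0)
		for len(work) > 0 {
			fmt.Println("processed", <-work, "wake drift (us):", drift)
		}
	}
}
```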


@ -1,6 +1,7 @@
package main
import (
"context"
"fmt"
"math"
"math/rand"
@ -18,7 +19,7 @@ const maxSleepInterval time.Duration = 15 * time.Millisecond
const tickerInterval time.Duration = 10 * time.Millisecond
const testDuration time.Duration = time.Duration(testCount * 10 * int64(time.Millisecond))
func testTickerBaseCase(wg *sync.WaitGroup) {
func testTickerBaseCase(wg *sync.WaitGroup, test *testing.T) {
wg.Add(1)
go func(interval time.Duration) {
now := time.Now()
@ -39,7 +40,7 @@ func testTickerBaseCase(wg *sync.WaitGroup) {
func TestTickerBaseCase(t *testing.T) {
wg := sync.WaitGroup{}
testTickerBaseCase(&wg)
testTickerBaseCase(&wg, t)
wg.Wait()
}
@ -115,7 +116,7 @@ func testSleepCT(wg *sync.WaitGroup) {
if i+1 < testCount {
time.Sleep(time.Duration(float64(maxSleepInterval) * rand.Float64()))
}
s.SleepNextTarget()
s.SleepNextTarget(context.TODO(), false)
}
fmt.Println("SleepCT (loaded) after", testDuration, "drifts", time.Since(start)-testDuration)
wg.Done()
@ -130,6 +131,35 @@ func TestSleepCT(t *testing.T) {
wg.Wait()
}
func testSleepCTPause(wg *sync.WaitGroup) {
wg.Add(1)
go func(interval time.Duration) {
now := time.Now()
start := now
// start the ticker
s := sleepct.SleepCT{}
s.Start(interval)
var i int64
for i = 0; i < testCount; i++ {
if i+1 < testCount {
time.Sleep(time.Duration(float64(maxSleepInterval) * rand.Float64()))
}
s.Notify()
s.SleepNextTarget(context.TODO(), true)
}
fmt.Println("SleepCT Pause (loaded) after", testDuration, "drifts", time.Since(start)-testDuration)
wg.Done()
}(tickerInterval)
}
func TestSleepCTPause(t *testing.T) {
wg := sync.WaitGroup{}
testSleepCTPause(&wg)
wg.Wait()
}
func TestIdleJitter(t *testing.T) {
wg := sync.WaitGroup{}