Compare commits

...

147 Commits

Author SHA1 Message Date
Cédric Verstraeten
448d4a946d Merge pull request #198 from kerberos-io/feature/fix-prerecording-duraiton
feature/fix-prerecording-duration
2025-07-04 16:57:01 +02:00
Cédric Verstraeten
61ac314bb7 Fix pre-recording time calculation logic in HandleRecordStream to handle initial recording case correctly 2025-07-04 14:44:13 +00:00
Cédric Verstraeten
c1b144ca28 Fix pre-recording time calculation by adjusting queued packets handling in HandleRecordStream 2025-07-04 14:37:22 +00:00
Cédric Verstraeten
e16987bf9d Refactor HandleRecordStream to improve pre-recording time calculation and adjust display time logic based on available queued packets. 2025-07-04 11:18:46 +00:00
Cédric Verstraeten
9991597984 Merge pull request #197 from kerberos-io/feature/add-duration-to-recordings
feature/add-duration-to-recordings
2025-07-04 09:18:07 +02:00
cedricve
2c0314cea4 Refactor HandleRecordStream to improve file renaming logic and enhance motion detection handling 2025-07-04 06:23:09 +00:00
cedricve
0584e52b98 Refactor HandleRecordStream to optimize pre-recording time calculation and streamline video stream handling 2025-07-03 20:34:18 +00:00
cedricve
1fc90eaee2 Refactor pre-recording time calculation and improve display time logic for better recording accuracy 2025-07-03 20:04:00 +00:00
cedricve
aef3eacbc9 Enhance pre-recording time calculation by incorporating GOP size and FPS; adjust display time and recording conditions based on pre-recording delta. 2025-07-03 17:51:46 +00:00
cedricve
2843568473 Refactor GOP size handling and enhance queue management for improved recording performance 2025-07-03 17:31:37 +00:00
Cédric Verstraeten
53ffc8cae0 Add GOP size configuration and enhance pre-recording handling for improved stream management 2025-07-02 13:28:02 +00:00
Cédric Verstraeten
86e654fe19 Add GOP size tracking and keyframe interval management for improved video processing 2025-07-02 10:51:23 +00:00
Cédric Verstraeten
46d57f7664 Enhance FPS calculation by adding timestamp-based averaging and improved SPS handling; implement debug logging for SPS information. 2025-07-02 09:53:47 +00:00
Cédric Verstraeten
963d8672eb Enhance recording process by adding display time calculation and logging for better tracking; add error handling for MP4 file creation when no samples are present. 2025-07-02 08:54:34 +00:00
Cédric Verstraeten
9b7a62816a Update mp4.go 2025-07-02 09:54:12 +02:00
Cédric Verstraeten
237134fe0e Update recording filename generation to include duration and motion rectangle for improved clarity 2025-07-01 15:03:01 +00:00
Cédric Verstraeten
c8730e8f26 Enhance recording filename generation to include motion rectangle and duration for improved clarity and uniqueness 2025-07-01 12:54:52 +00:00
Cédric Verstraeten
acbbe8b444 Enhance recording filename generation to include milliseconds and its length for improved uniqueness 2025-07-01 12:48:34 +00:00
Cédric Verstraeten
f690016aa5 Refactor motion detection to include motion rectangle and update logging levels for sample addition in MP4 track 2025-07-01 12:37:44 +00:00
Cédric Verstraeten
396cfe5d8b Merge pull request #191 from kerberos-io/feature/migrate-to--mp4ff
feature/Add MP4 video handling and update IPCamera configuration
2025-06-24 13:39:56 +02:00
Cédric Verstraeten
39fe640ccf Refactor logging in AddSampleToTrack method to use structured logging 2025-06-23 10:21:02 +00:00
Cédric Verstraeten
d389c9b0b6 Add logging for sample addition in MP4 track 2025-06-23 10:07:30 +00:00
Cédric Verstraeten
b149686db8 Remove Bento4 build steps and clean up Dockerfile structure 2025-06-23 09:57:04 +00:00
Cédric Verstraeten
c4358cbfad Fix typo in IPCamera struct: update VPSNALUs field JSON tag from "pps_nalus" to "vps_nalus" 2025-06-23 09:03:00 +00:00
Cédric Verstraeten
cfc5bd3dfe Remove unused audio stream retrieval in HandleRecordStream function 2025-06-23 07:58:39 +00:00
Cédric Verstraeten
c29c1b6a92 Merge branch 'master' into feature/migrate-to--mp4ff 2025-06-23 09:55:31 +02:00
Cédric Verstraeten
0f45a2a4b4 Merge branch 'feature/migrate-to--mp4ff' of github.com:kerberos-io/agent into feature/migrate-to--mp4ff 2025-06-23 09:54:41 +02:00
Cédric Verstraeten
92edcc13c0 Refactor OpenTelemetry tracing integration in RTSP client and components for improved context handling 2025-06-23 07:54:34 +00:00
cedricve
5392e2ba90 Update Dockerfile to remove incorrect source path and add Bento4 build process 2025-06-22 19:46:03 +00:00
cedricve
79e1f659c7 Update mongo-driver dependency from v1.17.4 to v1.17.3 to maintain compatibility 2025-06-21 20:13:38 +00:00
cedricve
bf35e5efb6 Implement OpenTelemetry tracing in the agent
- Added OpenTelemetry tracing support in main.go, including a new function startTracing to initialize the tracer with a configurable endpoint.
- Updated the environment attribute from "testing" to "develop" for better clarity in tracing.
- Integrated tracing into the RTSP connection process in gortsplib.go by creating a span for the Connect method.
- Enhanced the Bootstrap function in Kerberos.go to include tracing, marking the start and end of the bootstrap process.
- Introduced a new span in RunAgent to trace the execution flow and ensure proper span management.
2025-06-20 09:35:13 +00:00
Cédric Verstraeten
c50137f255 Comment out OpenTelemetry tracing initialization in main.go to simplify the codebase and remove unused functionality. 2025-06-16 10:30:02 +00:00
Cédric Verstraeten
f12da749b2 Remove OpenTelemetry tracing code from main.go and Kerberos.go files to simplify the codebase and eliminate unused dependencies. 2025-06-16 10:08:55 +00:00
Cédric Verstraeten
a166083423 Update machinery/src/packets/stream.go
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-06-16 10:20:43 +02:00
Cédric Verstraeten
b400d4e773 Refactor Dockerfile build commands to streamline Go build process and improve clarity 2025-06-16 06:42:08 +00:00
Cédric Verstraeten
120054d3e5 Add SampleRate and Channels fields to IPCamera configuration and update audio stream handling 2025-06-16 06:37:19 +00:00
cedricve
620117c31b Refactor WriteToTrack to use updated PacketTimestamp for video and audio samples, improving synchronization accuracy. 2025-06-07 22:12:15 +00:00
cedricve
4e371488c1 Remove unnecessary copy of mp4fragment in Dockerfile, streamlining the agent setup process. 2025-06-07 21:22:49 +00:00
cedricve
b154b56308 Refactor Dockerfile to remove CGO_ENABLED=0 from build command, simplifying the build process for the agent. 2025-06-07 21:17:25 +00:00
cedricve
6d92817237 Refactor HandleRecordStream to adjust maxRecordingPeriod calculation for improved timing accuracy. Simplify mp4 segment encoding logic to ensure it always attempts to encode the last segment, enhancing error handling. 2025-06-07 12:30:42 +00:00
cedricve
b8c1855830 Refactor HandleRecordStream to use milliseconds for timing calculations, improving accuracy in recording periods and motion detection logic. Update mp4 encoding to ensure segment encoding only occurs if a segment exists, preventing potential panics. 2025-06-07 11:53:03 +00:00
cedricve
a9f7ff4b72 Refactor HandleRecordStream to remove unused mp4.Movmuxer and streamline video sample handling with mp4Video, enhancing recording process and error logging. 2025-06-07 06:26:23 +00:00
Cédric Verstraeten
b3cd080e14 Refactor Dockerfile and main.go to enhance build process and streamline video handling 2025-06-06 15:14:45 +00:00
Cédric Verstraeten
bfde87f888 Refactor WriteToTrack to improve sample handling by using last processed audio and video samples, enhancing buffer duration calculation and streamlining packet processing. 2025-06-06 14:36:19 +00:00
Cédric Verstraeten
c4453bb8b3 Fix packet handling in WriteToTrack to ensure proper processing of next packets on timeout and empty data 2025-06-06 13:36:30 +00:00
Cédric Verstraeten
40f65a30b3 Clarify audio transcoding process in WriteToTrack with detailed comments on AAC to PCM_MULAW conversion 2025-06-06 13:33:28 +00:00
Cédric Verstraeten
5361de63e0 Refactor packet handling in WriteToTrack to improve buffer duration calculation and streamline packet reading 2025-06-06 13:23:09 +00:00
Cédric Verstraeten
3a8552d362 Enhance MP4 handling by updating track IDs in fragment creation, improving H264 and H265 NAL unit conversion, and adding support for HVC1 compatible brands in the ftyp box 2025-06-05 14:48:19 +00:00
Cédric Verstraeten
d3840103fc Add VPS NALUs support in IPCamera configuration and MP4 handling for improved video processing 2025-06-05 13:28:10 +00:00
Cédric Verstraeten
d12a9f0612 Refactor MP4 handling by simplifying Close method and adding last sample DTS tracking for better audio and video sample management 2025-06-05 10:59:44 +00:00
cedricve
c0d74f7e09 Remove placeholder comments from AddSampleToTrack and Close methods for cleaner code 2025-06-04 19:23:48 +00:00
cedricve
8ebea9e4c5 Refactor MP4 struct by removing unused video and audio fragment fields, and enhance track handling in Close method for better audio and subtitle track management 2025-06-04 19:03:58 +00:00
cedricve
89269caf92 Refactor AddSampleToTrack and SplitAACFrame methods to enhance audio sample handling and improve error logging 2025-06-04 18:36:00 +00:00
Cédric Verstraeten
0c83170f51 Fix AAC descriptor index in Close method to ensure correct audio track setup 2025-06-04 13:15:08 +00:00
Cédric Verstraeten
6081cb4be9 Update mp4.go 2025-06-04 14:39:44 +02:00
Cédric Verstraeten
ea1dbb3087 Refactor AddSampleToTrack method to improve AAC frame handling by splitting frames and updating duration calculations for audio samples 2025-06-04 09:49:29 +00:00
Cédric Verstraeten
0523208d36 Update mp4.go 2025-06-04 11:28:16 +02:00
Cédric Verstraeten
919f21b48b Refactor AddSampleToTrack method to create separate video and audio fragments, enhancing sample handling and improving error logging for AAC frames 2025-06-04 08:45:54 +00:00
cedricve
2c1c10a2ac Refactor AddSampleToTrack and Close methods to improve sample handling and track management for video and audio 2025-06-03 20:33:00 +00:00
cedricve
7e3320b252 Refactor AddSampleToTrack method to remove duration parameter and enhance fragment handling for video and audio tracks 2025-06-03 19:18:16 +00:00
Cédric Verstraeten
35ccac8b65 Refactor MP4 fragment handling in AddSampleToTrack method to separate video and audio fragments for improved track management 2025-06-03 13:29:36 +00:00
Cédric Verstraeten
dad8165d11 Enhance sample handling in AddSampleToTrack method to support multiple packets and improve error logging 2025-06-03 12:30:03 +00:00
Cédric Verstraeten
ba54188de2 Refactor video and audio track handling in MP4 structure to store track names and return track IDs for better management 2025-06-03 10:23:14 +00:00
cedricve
3b440c9905 Add audio and video codec detection in HandleRecordStream function 2025-06-03 06:27:25 +00:00
cedricve
42b98b7f20 Update mp4.go 2025-06-03 08:25:51 +02:00
cedricve
ba3312b57c Refactor AddSampleToTrack method to return error instead of panicking for better error handling 2025-06-03 05:55:23 +00:00
cedricve
223ba255e9 Fix signature handling in MP4 closing logic to ensure valid signatures are used for fingerprint 2025-06-02 17:45:05 +00:00
Cédric Verstraeten
a1df2be207 Implement signing feature with default private key configuration and update MP4 closing logic to include fingerprint signing 2025-06-02 16:02:06 +00:00
Cédric Verstraeten
d7f225ca73 Add signing configuration placeholder to the agent's config 2025-06-02 14:08:47 +00:00
Cédric Verstraeten
b3cfabb5df Update signing configuration to use private key for recording validation 2025-06-02 14:06:16 +00:00
Cédric Verstraeten
5310dd4550 Add signing configuration options to the agent 2025-06-02 13:50:48 +00:00
Cédric Verstraeten
cde7dbb58a Add configuration options for signing recordings and public key usage 2025-06-02 13:41:15 +00:00
Cédric Verstraeten
65e68231c7 Refactor MP4 handling in capture and video modules
- Updated the HandleRecordStream function to use TimeLegacy for packet timestamps instead of the previous Time conversion method.
- Modified the MP4 struct to replace InitSegment with a list of MediaSegments, allowing for better management of segments.
- Introduced StartTime to the MP4 struct to track the creation time of the MP4 file.
- Enhanced the Close method in the MP4 struct to properly handle segment indexing (SIDX) and ensure accurate duration calculations.
- Implemented helper functions to fill SIDX boxes and find segment data, improving the overall structure and readability of the code.
2025-06-02 12:27:22 +00:00
Cédric Verstraeten
5502555869 Integrate OpenTelemetry tracing in main and components, enhancing observability 2025-06-02 07:30:49 +00:00
cedricve
ad6e7e752f Refactor MP4 handling to remove commented-out track additions and enhance moov box management 2025-06-02 07:15:24 +00:00
cedricve
63af4660ef Refactor MP4 initialization and closing logic to improve segment handling and add custom UUID support 2025-06-01 20:07:36 +00:00
cedricve
24fc340001 Refactor MP4 initialization and sample addition logic to enhance duration handling and segment management 2025-05-30 19:06:56 +00:00
cedricve
78d786b69d Add custom UUID box and enhance MP4 file closing logic 2025-05-29 10:14:43 +00:00
cedricve
756aeaa0eb Refactor MP4 handling to improve sample addition and duration calculation 2025-05-28 18:36:34 +00:00
cedricve
055fb67d7a Update mp4.go 2025-05-26 21:59:23 +02:00
cedricve
bee522a6bf Refactor MP4 handling to improve sample addition and segment management 2025-05-26 06:00:17 +00:00
Cédric Verstraeten
3fbf59c622 Merge pull request #192 from kerberos-io/fix/do-not-add-aac-track
fix/add audio codec handling in HandleRecordStream function
2025-05-22 21:07:28 +02:00
cedricve
abd8b8b605 Add audio codec handling in HandleRecordStream function 2025-05-22 18:33:13 +00:00
cedricve
abdad47bf3 Add MP4 video handling and update IPCamera configuration
- Introduced a new video package with MP4 struct for video file handling.
- Updated IPCamera struct to include SPS and PPS NALUs.
- Enhanced stream handling in the capture process to utilize the new MP4 library.
- Added stream index management for better tracking of video and audio streams.
2025-05-22 05:53:33 +00:00
Cédric Verstraeten
d2c24edf5d Merge pull request #190 from kerberos-io/feature/update-workflow-do-not-push-to-latest
Update Docker build workflow to use input tag for image naming
2025-05-20 16:05:04 +02:00
Cédric Verstraeten
22f4a7f119 Update Docker build workflow to use input tag for image naming 2025-05-20 14:03:44 +00:00
Cédric Verstraeten
a25d3d32e4 Merge pull request #189 from kerberos-io/feature/allow-release-workflow-to-triggered-manually
feature/Enhance release workflow to include tag input for Docker image
2025-05-20 14:46:26 +02:00
Cédric Verstraeten
ed68c32e04 Enhance release workflow to include tag input for Docker image 2025-05-20 12:45:52 +00:00
Cédric Verstraeten
4114b3839a Merge pull request #187 from kerberos-io/upgrade/base-image
Update base image version in Dockerfile
2025-05-19 15:22:36 +02:00
Cédric Verstraeten
3f73c009fd Update Dockerfile
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-05-19 15:15:33 +02:00
Cédric Verstraeten
02fb70c76e Update base image version in Dockerfile 2025-05-19 14:52:28 +02:00
Cédric Verstraeten
aaddcb854d Merge pull request #185 from kerberos-io/feature/retry-windows-secondary-vault
Feature/retry windows secondary vault
2025-05-17 21:40:58 +02:00
cedricve
e73c7a6ecc Remove kstorageRetryPolicy from configuration 2025-05-17 19:37:07 +00:00
cedricve
1dc2202f37 Enhance logging for secondary Kerberos Vault upload process 2025-05-17 19:29:35 +00:00
cedricve
ac710ae1f5 Fix typo in Kerberos Vault max retries translation key 2025-05-17 19:16:27 +00:00
cedricve
f5ea82ff03 Add Kerberos Vault settings for max retries and timeout configuration 2025-05-17 19:14:02 +00:00
cedricve
ef52325240 Update Kerberos Vault configuration for max retries and timeout; adjust upload delay 2025-05-17 08:37:40 +00:00
cedricve
354855feb1 Refactor Kerberos Vault configuration for retry policy consistency 2025-05-17 08:23:32 +00:00
cedricve
c4cd25b588 Add Kerberos Vault configuration options and retry policy support 2025-05-17 08:21:28 +00:00
cedricve
dbb870229e Update config.json 2025-05-16 19:00:33 +02:00
cedricve
a66fe8c054 Merge branch 'master' into feature/retry-windows-secondary-vault 2025-05-16 19:00:13 +02:00
Cédric Verstraeten
2352431c79 Merge pull request #184 from kerberos-io/upgrade/gortsplib
upgrade/dependencies
2025-05-16 18:54:45 +02:00
cedricve
49bc168812 Refactor code structure for improved readability and maintainability 2025-05-16 15:53:40 +00:00
cedricve
98f1ebf20a Add retry policy for Kerberos Vault uploads and update configuration model 2025-05-16 15:50:59 +00:00
cedricve
65feb6d182 Add initial configuration file for agent settings 2025-05-15 12:20:04 +00:00
cedricve
58555d352f Update .gitignore and launch.json to reference .env.local instead of .env 2025-05-15 10:42:01 +00:00
Cédric Verstraeten
839a177cf0 Merge branch 'master' into feature/retry-windows-secondary-vault 2025-05-14 14:57:53 +02:00
Cédric Verstraeten
404517ec40 Merge pull request #183 from kerberos-io/cedricve-patch-1
Create .env
2025-05-14 14:56:46 +02:00
Cédric Verstraeten
035bd18bc2 Create .env 2025-05-14 14:56:31 +02:00
Cédric Verstraeten
8bf7a0d244 Update devcontainer.json 2025-05-14 14:53:41 +02:00
Cédric Verstraeten
607d8fd0d1 Merge pull request #182 from kerberos-io/feature/retry-windows-secondary-vault
Remove .env + config file, we will manually add as these are part of the .gitignore
2025-05-14 14:52:15 +02:00
Cédric Verstraeten
12807e289c remove .env + config file, we will manually add as these are part of the .gitignore 2025-05-14 14:36:16 +02:00
Cédric Verstraeten
3a984f1c73 Merge pull request #180 from kerberos-io/fix/merge-secondary-kerberos-vault-settings
Add support for secondary Kerberos Vault settings in configuration
2025-04-27 21:23:55 +02:00
cedricve
b84e34da06 Add support for secondary Kerberos Vault settings in configuration 2025-04-27 21:21:00 +02:00
Cédric Verstraeten
541d151570 Merge pull request #179 from kerberos-io/fix/omit-blank-kstorage
Make KStorage fields optional in JSON and BSON serialization
2025-04-27 20:53:18 +02:00
cedricve
4ad97e1286 Make KStorage fields optional in JSON and BSON serialization 2025-04-27 18:46:14 +00:00
Kilian
a80b375e89 Update README.md 2025-04-25 13:04:28 +02:00
Cédric Verstraeten
91cb390f6e Merge pull request #178 from kerberos-io/fix/secondary-vault-initialization
Fix/Add kstorage_secondary configuration field and initialize in environment vars
2025-04-24 11:59:39 +02:00
Cédric Verstraeten
90780dae28 Add kstorage_secondary configuration field and initialize in environment variable overrides 2025-04-24 09:56:53 +00:00
Cédric Verstraeten
ddb08e90e1 Merge pull request #176 from kerberos-io/feature/add-secondary-kerberos-vault
Add secondary KStorage for fallback (hybrid scenario)
2025-04-24 11:23:14 +02:00
Cédric Verstraeten
0d95026819 Add loading state for secondary persistence verification in Settings UI 2025-04-24 08:58:33 +00:00
Cédric Verstraeten
79db3a9dfe Add support for secondary Kerberos Vault configuration in environment variable overrides 2025-04-24 07:20:12 +00:00
cedricve
9f63ffd540 Add secondary persistence verification and UI integration 2025-04-23 20:30:11 +00:00
cedricve
9c7116a462 Add secondary persistence verification and UI integration 2025-04-23 15:24:24 +00:00
cedricve
dd9b4d43ac Update development API URLs to use port 8080 2025-04-23 16:19:56 +02:00
Cédric Verstraeten
aa63eca24c Add persistence configuration inputs for Kerberos Vault in Settings 2025-04-23 12:46:43 +00:00
Cédric Verstraeten
6df97171d9 Merge pull request #177 from kerberos-io/fix/increase-channel-size-for-audio-motion-hdhandshake
Fix/ Increase channel buffer sizes for communication handling
2025-04-23 13:24:52 +02:00
Cédric Verstraeten
56f7d69b3d Increase channel buffer sizes for communication handling 2025-04-23 11:05:16 +00:00
Cédric Verstraeten
3e2b29284e Add secondary KStorage configuration to the Config struct 2025-04-18 12:11:37 +00:00
Cédric Verstraeten
18ceca7510 Merge pull request #175 from kerberos-io/fix/remote-region-not-properly-calculated
Fix/Remote config / region not properly calculated
2025-04-18 14:06:37 +02:00
Cédric Verstraeten
5a08d1f3de Update main.go 2025-04-18 14:03:23 +02:00
Cédric Verstraeten
18af6db00c Update base image in Dockerfile to version af04230 2025-04-15 13:17:07 +00:00
Cédric Verstraeten
6d170c8dc0 change to version 1.24 + change workflow name 2025-04-15 15:00:51 +02:00
Cédric Verstraeten
9c4c3c654d Update main.go 2025-04-15 14:23:56 +02:00
Cédric Verstraeten
6952e387f4 Merge pull request #172 from kerberos-io/improvement/cleanup-and-refactors
Enhancement / Cleanup and refactoring of documentation
2025-04-15 14:15:57 +02:00
Cédric Verstraeten
66c9ae5c27 Merge pull request #174 from kerberos-io/feature/webrtc-handle-nacks
Feature / enable interceptors for NACK and retransmission of packets
2025-04-15 14:15:19 +02:00
Cédric Verstraeten
0fb7601dcb Update main.go 2025-04-15 13:49:18 +02:00
Cédric Verstraeten
07c6e680d1 Create default .env 2025-04-14 13:15:18 +02:00
cedricve
b972bc3040 Fix: Convert audio type to mpeg4audio.ObjectType in WriteMPEG4Audio function
Updated the WriteMPEG4Audio function to convert the audio type from forma.Config.Type to mpeg4audio.ObjectType. This change ensures that the correct object type is used when creating ADTSPacket instances for MPEG-4 audio.
2025-04-13 17:28:41 +00:00
cedricve
969d42dbca Remove Travis CI configuration and build script 2025-04-13 07:54:20 +00:00
Cédric Verstraeten
6680df9382 Merge pull request #171 from kerberos-io/feature/improve-dev-container
Improvement / Refactor Dockerfile and devcontainer configuration;
2025-04-13 09:52:20 +02:00
cedricve
8877157db5 Refactor Dockerfile and devcontainer configuration; add FFmpeg and Node.js installation 2025-04-13 07:46:06 +00:00
Cédric Verstraeten
ac814dc357 Merge pull request #168 from thanhtantran/master
add Vietnamese locales
2025-04-11 21:24:14 +02:00
Orange Pi Vietnam
4fcb12c3a3 Update translation.json
remove duplicate
2025-04-08 22:31:25 +07:00
Tony Tran
7bcc30f4b7 add Vietnamese locales 2025-02-27 15:06:41 +00:00
Cédric Verstraeten
481f917fcf Merge pull request #166 from kerberos-io/fix/candidategather-extended-buffer
Fix ice candidate gather and extended candidate buffer
2025-02-11 21:56:49 +01:00
Cedric Verstraeten
700a32e4c8 Update main.go 2025-02-11 21:51:14 +01:00
43 changed files with 4646 additions and 937 deletions

View File

@@ -1,2 +1,26 @@
FROM kerberos/devcontainer:5da0fe7
LABEL AUTHOR=Kerberos.io
FROM mcr.microsoft.com/devcontainers/go:1.24-bookworm
# Install node environment
RUN apt-get update && \
apt-get install -y --no-install-recommends \
nodejs \
npm \
&& rm -rf /var/lib/apt/lists/*
# Install ffmpeg
RUN apt-get update && \
apt-get install -y --no-install-recommends \
ffmpeg \
libavcodec-extra \
libavutil-dev \
libavformat-dev \
libavfilter-dev \
libavdevice-dev \
libswscale-dev \
libswresample-dev \
&& rm -rf /var/lib/apt/lists/*
USER vscode
# Install go swagger
RUN go install github.com/swaggo/swag/cmd/swag@latest

View File

@@ -1,21 +1,24 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/python
{
"name": "A Dockerfile containing FFmpeg, OpenCV, Go and Yarn",
"context": "..",
"dockerFile": "./Dockerfile",
"forwardPorts": [
3000,
80
"name": "go:1.24-bookworm",
"runArgs": [
"--name=agent",
"--network=host"
],
"postCreateCommand": "cd ui && yarn install && yarn build && cd ../machinery && go mod download",
"dockerFile": "Dockerfile",
"customizations": {
"vscode": {
"extensions": [
"ms-kubernetes-tools.vscode-kubernetes-tools",
"ms-azuretools.vscode-docker",
"GitHub.copilot",
"golang.go",
"ms-vscode.vscode-typescript-next"
"ms-azuretools.vscode-docker",
"mongodb.mongodb-vscode"
]
}
}
},
"forwardPorts": [
3000,
8080
],
"postCreateCommand": "cd ui && yarn install && yarn build && cd ../machinery && go mod download"
}

View File

@@ -1,8 +1,13 @@
name: Docker release build
name: Create a new release
on:
release:
types: [created]
workflow_dispatch:
inputs:
tag:
description: "Tag for the Docker image"
required: true
default: "test"
env:
REPO: kerberos/agent
@@ -34,13 +39,14 @@ jobs:
- name: Available platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Run Buildx
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-${{matrix.architecture}}-${{github.ref_name}} --push .
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-${{matrix.architecture}}-${{github.event.inputs.tag || github.ref_name}} --push .
- name: Create new and append to manifest
run: docker buildx imagetools create -t $REPO:${{ github.ref_name }} $REPO-arch:arch-${{matrix.architecture}}-${{github.ref_name}}
run: docker buildx imagetools create -t $REPO:${{ github.event.inputs.tag || github.ref_name }} $REPO-arch:arch-${{matrix.architecture}}-${{github.event.inputs.tag || github.ref_name}}
- name: Create new and append to manifest latest
run: docker buildx imagetools create -t $REPO:latest $REPO-arch:arch-${{matrix.architecture}}-${{github.ref_name}}
run: docker buildx imagetools create -t $REPO:latest $REPO-arch:arch-${{matrix.architecture}}-${{github.event.inputs.tag || github.ref_name}}
if: github.event.inputs.tag == 'test'
- name: Run Buildx with output
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-$(echo ${{matrix.architecture}} | tr / -)-${{github.ref_name}} --output type=tar,dest=output-${{matrix.architecture}}.tar .
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-$(echo ${{matrix.architecture}} | tr / -)-${{github.event.inputs.tag || github.ref_name}} --output type=tar,dest=output-${{matrix.architecture}}.tar .
- name: Strip binary
run: mkdir -p output/ && tar -xf output-${{matrix.architecture}}.tar -C output && rm output-${{matrix.architecture}}.tar && cd output/ && tar -cf ../agent-${{matrix.architecture}}.tar -C home/agent . && rm -rf output
- name: Create a release
@@ -48,8 +54,8 @@ jobs:
with:
latest: true
allowUpdates: true
name: ${{ github.ref_name }}
tag: ${{ github.ref_name }}
name: ${{ github.event.inputs.tag || github.ref_name }}
tag: ${{ github.event.inputs.tag || github.ref_name }}
generateReleaseNotes: false
omitBodyDuringUpdate: true
artifacts: "agent-${{matrix.architecture}}.tar"
@@ -92,13 +98,14 @@ jobs:
- name: Available platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Run Buildx
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-${{matrix.architecture}}-${{github.ref_name}} --push .
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-${{matrix.architecture}}-${{github.event.inputs.tag || github.ref_name}} --push .
- name: Create new and append to manifest
run: docker buildx imagetools create --append -t $REPO:${{ github.ref_name }} $REPO-arch:arch-${{matrix.architecture}}-${{github.ref_name}}
run: docker buildx imagetools create --append -t $REPO:${{ github.event.inputs.tag || github.ref_name }} $REPO-arch:arch-${{matrix.architecture}}-${{github.event.inputs.tag || github.ref_name}}
- name: Create new and append to manifest latest
run: docker buildx imagetools create --append -t $REPO:latest $REPO-arch:arch-${{matrix.architecture}}-${{github.ref_name}}
run: docker buildx imagetools create --append -t $REPO:latest $REPO-arch:arch-${{matrix.architecture}}-${{github.event.inputs.tag || github.ref_name}}
if: github.event.inputs.tag == 'test'
- name: Run Buildx with output
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-$(echo ${{matrix.architecture}} | tr / -)-${{github.ref_name}} --output type=tar,dest=output-${{matrix.architecture}}.tar .
run: docker buildx build --platform linux/$(echo ${{matrix.architecture}} | tr - /) -t $REPO-arch:arch-$(echo ${{matrix.architecture}} | tr / -)-${{github.event.inputs.tag || github.ref_name}} --output type=tar,dest=output-${{matrix.architecture}}.tar .
- name: Strip binary
run: mkdir -p output/ && tar -xf output-${{matrix.architecture}}.tar -C output && rm output-${{matrix.architecture}}.tar && cd output/ && tar -cf ../agent-${{matrix.architecture}}.tar -C home/agent . && rm -rf output
- name: Create a release
@@ -106,8 +113,8 @@ jobs:
with:
latest: true
allowUpdates: true
name: ${{ github.ref_name }}
tag: ${{ github.ref_name }}
name: ${{ github.event.inputs.tag || github.ref_name }}
tag: ${{ github.event.inputs.tag || github.ref_name }}
generateReleaseNotes: false
omitBodyDuringUpdate: true
artifacts: "agent-${{matrix.architecture}}.tar"

View File

@@ -17,7 +17,7 @@ jobs:
matrix:
#No longer supported Go versions.
#go-version: ['1.17', '1.18', '1.19', '1.20', '1.21']
go-version: ["1.22"]
go-version: ["1.24"]
steps:
- name: Set up Go ${{ matrix.go-version }}

2
.gitignore vendored
View File

@@ -10,6 +10,6 @@ machinery/data/recordings
machinery/data/snapshots
machinery/test*
machinery/init-dev.sh
machinery/.env
machinery/.env.local
machinery/vendor
deployments/docker/private-docker-compose.yaml

View File

@@ -1,19 +0,0 @@
language: go
go:
- 1.12.x
- 1.13.x
- 1.14.x
- 1.15.x
- tip
before_install:
- cd machinery
- go mod download
script:
- go vet
- go test -race -coverprofile=coverage.txt -covermode=atomic
after_success:
- bash <(curl -s https://codecov.io/bash)

6
.vscode/launch.json vendored
View File

@@ -12,9 +12,11 @@
"program": "${workspaceFolder}/machinery/main.go",
"args": [
"-action",
"run"
"run",
"-port",
"8080"
],
"envFile": "${workspaceFolder}/machinery/.env",
"envFile": "${workspaceFolder}/machinery/.env.local",
"buildFlags": "--tags dynamic",
},
{

View File

@@ -1,5 +1,6 @@
FROM kerberos/base:5d5e86b AS build-machinery
ARG BASE_IMAGE_VERSION=70ec57e
FROM kerberos/base:${BASE_IMAGE_VERSION} AS build-machinery
LABEL AUTHOR=Kerberos.io
ENV GOROOT=/usr/local/go
@@ -43,8 +44,7 @@ RUN cd /go/src/github.com/kerberos-io/agent/machinery && \
mkdir -p /agent/data/log && \
mkdir -p /agent/data/recordings && \
mkdir -p /agent/data/capture-test && \
mkdir -p /agent/data/config && \
rm -rf /go/src/gitlab.com/
mkdir -p /agent/data/config
####################################
# Let's create a /dist folder containing just the files necessary for runtime.
@@ -58,18 +58,6 @@ RUN cp -r /agent ./
RUN /dist/agent/main version
###############################################
# Build Bento4 -> we want fragmented mp4 files
ENV BENTO4_VERSION 1.6.0-641
RUN cd /tmp && git clone https://github.com/axiomatic-systems/Bento4 && cd Bento4 && \
git checkout tags/v${BENTO4_VERSION} && \
cd Build && \
cmake -DCMAKE_BUILD_TYPE=Release .. && \
make && \
mv /tmp/Bento4/Build/mp4fragment /dist/agent/ && \
rm -rf /tmp/Bento4
FROM node:18.14.0-alpine3.16 AS build-ui
RUN apk update && apk upgrade --available && sync
@@ -111,7 +99,6 @@ RUN apk update && apk add ca-certificates curl libstdc++ libc6-compat --no-cache
# Try running agent
RUN mv /agent/* /home/agent/
RUN cp /home/agent/mp4fragment /usr/local/bin/
RUN /home/agent/main version
#######################
@@ -148,4 +135,4 @@ HEALTHCHECK CMD curl --fail http://localhost:80 || exit 1
# Leeeeettttt'ssss goooooo!!!
# Run the shizzle from the right working directory.
WORKDIR /home/agent
CMD ["./main", "-action", "run", "-port", "80"]
CMD ["./main", "-action", "run", "-port", "80"]

132
README.md
View File

@@ -190,68 +190,76 @@ Next to attaching the configuration file, it is also possible to override the co
| Name | Description | Default Value |
| --------------------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------ |
| `LOG_LEVEL` | Level for logging, could be "info", "warning", "debug", "error" or "fatal". | "info" |
| `LOG_OUTPUT` | Logging output format "json" or "text". | "text" |
| `AGENT_MODE` | You can choose to run this in 'release' for production, and or 'demo' for showcasing. | "release" |
| `AGENT_TLS_INSECURE` | Specify if you want to use `InsecureSkipVerify` for the internal HTTP client. | "false" |
| `AGENT_USERNAME` | The username used to authenticate against the Kerberos Agent login page. | "root" |
| `AGENT_PASSWORD` | The password used to authenticate against the Kerberos Agent login page. | "root" |
| `AGENT_KEY` | A unique identifier for your Kerberos Agent, this is auto-generated but can be overriden. | "" |
| `AGENT_NAME` | The agent friendly-name. | "agent" |
| `AGENT_TIMEZONE` | Timezone which is used for converting time. | "Africa/Ceuta" |
| `AGENT_REMOVE_AFTER_UPLOAD` | When enabled, recordings uploaded successfully to a storage will be removed from disk. | "true" |
| `AGENT_OFFLINE` | Makes sure no external connection is made. | "false" |
| `AGENT_AUTO_CLEAN` | Cleans up the recordings directory. | "true" |
| `AGENT_AUTO_CLEAN_MAX_SIZE` | If `AUTO_CLEAN` enabled, set the max size of the recordings directory (in MB). | "100" |
| `AGENT_TIME` | Enable the timetable for Kerberos Agent | "false" |
| `AGENT_TIMETABLE` | A (weekly) time table to specify when to make recordings "start1,end1,start2,end2;start1.. | "" |
| `AGENT_REGION_POLYGON` | A single polygon set for motion detection: "x1,y1;x2,y2;x3,y3;... | "" |
| `AGENT_CAPTURE_IPCAMERA_RTSP` | Full-HD RTSP endpoint to the camera you're targetting. | "" |
| `AGENT_CAPTURE_IPCAMERA_SUB_RTSP` | Sub-stream RTSP endpoint used for livestreaming (WebRTC). | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF` | Mark as a compliant ONVIF device. | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF_XADDR` | ONVIF endpoint/address running on the camera. | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF_USERNAME` | ONVIF username to authenticate against. | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF_PASSWORD` | ONVIF password to authenticate against. | "" |
| `AGENT_CAPTURE_MOTION` | Toggle for enabling or disabling motion. | "true" |
| `AGENT_CAPTURE_LIVEVIEW` | Toggle for enabling or disabling liveview. | "true" |
| `AGENT_CAPTURE_SNAPSHOTS` | Toggle for enabling or disabling snapshot generation. | "true" |
| `AGENT_CAPTURE_RECORDING` | Toggle for enabling making recordings. | "true" |
| `AGENT_CAPTURE_CONTINUOUS` | Toggle for enabling continuous "true" or motion "false". | "false" |
| `AGENT_CAPTURE_PRERECORDING` | If `CONTINUOUS` set to `false`, specify the recording time (seconds) before/after motion event. | "10" |
| `AGENT_CAPTURE_POSTRECORDING` | If `CONTINUOUS` set to `false`, specify the recording time (seconds) after motion event. | "20" |
| `AGENT_CAPTURE_MAXLENGTH` | The maximum length of a single recording (seconds). | "30" |
| `AGENT_CAPTURE_PIXEL_CHANGE` | If `CONTINUOUS` set to `false`, the number of pixel require to change before motion triggers. | "150" |
| `AGENT_CAPTURE_FRAGMENTED` | Set the format of the recorded MP4 to fragmented (suitable for HLS). | "false" |
| `AGENT_CAPTURE_FRAGMENTED_DURATION` | If `AGENT_CAPTURE_FRAGMENTED` set to `true`, define the duration (seconds) of a fragment. | "8" |
| `AGENT_MQTT_URI` | An MQTT broker endpoint that is used for bi-directional communication (live view, onvif, etc) | "tcp://mqtt.kerberos.io:1883" |
| `AGENT_MQTT_USERNAME` | Username of the MQTT broker. | "" |
| `AGENT_MQTT_PASSWORD` | Password of the MQTT broker. | "" |
| `AGENT_REALTIME_PROCESSING` | If `AGENT_REALTIME_PROCESSING` set to `true`, the agent will send key frames to the topic | "" |
| `AGENT_REALTIME_PROCESSING_TOPIC` | The topic to which keyframes will be sent in base64 encoded format. | "" |
| `AGENT_STUN_URI` | When using WebRTC, you'll need to provide a STUN server. | "stun:turn.kerberos.io:8443" |
| `AGENT_FORCE_TURN` | Force using a TURN server, by generating relay candidates only. | "false" |
| `AGENT_TURN_URI` | When using WebRTC, you'll need to provide a TURN server. | "turn:turn.kerberos.io:8443" |
| `AGENT_TURN_USERNAME` | TURN username used for WebRTC. | "username1" |
| `AGENT_TURN_PASSWORD` | TURN password used for WebRTC. | "password1" |
| `AGENT_CLOUD` | Store recordings in Kerberos Hub (s3), Kerberos Vault (kstorage), or Dropbox (dropbox). | "s3" |
| `AGENT_HUB_ENCRYPTION` | Turning on/off encryption of traffic from your Kerberos Agent to Kerberos Hub. | "true" |
| `AGENT_HUB_URI` | The Kerberos Hub API, defaults to our Kerberos Hub SAAS. | "https://api.hub.domain.com" |
| `AGENT_HUB_KEY` | The access key linked to your account in Kerberos Hub. | "" |
| `AGENT_HUB_PRIVATE_KEY` | The secret access key linked to your account in Kerberos Hub. | "" |
| `AGENT_HUB_REGION` | The Kerberos Hub region, to which you want to upload. | "" |
| `AGENT_HUB_SITE` | The site ID of a site you've created in your Kerberos Hub account. | "" |
| `AGENT_KERBEROSVAULT_URI` | The Kerberos Vault API url. | "https://vault.domain.com/api" |
| `AGENT_KERBEROSVAULT_ACCESS_KEY` | The access key of a Kerberos Vault account. | "" |
| `AGENT_KERBEROSVAULT_SECRET_KEY` | The secret key of a Kerberos Vault account. | "" |
| `AGENT_KERBEROSVAULT_PROVIDER` | A Kerberos Vault provider you have created (optional). | "" |
| `AGENT_KERBEROSVAULT_DIRECTORY` | The directory, in the Kerberos vault, where the recordings will be stored. | "" |
| `AGENT_DROPBOX_ACCESS_TOKEN` | The Access Token from your Dropbox app, that is used to leverage the Dropbox SDK. | "" |
| `AGENT_DROPBOX_DIRECTORY` | The directory, in Dropbox, where the recordings will be stored. | "" |
| `AGENT_ENCRYPTION` | Enable 'true' or disable 'false' end-to-end encryption for MQTT messages. | "false" |
| `AGENT_ENCRYPTION_RECORDINGS` | Enable 'true' or disable 'false' end-to-end encryption for recordings. | "false" |
| `AGENT_ENCRYPTION_FINGERPRINT` | The fingerprint of the keypair (public/private keys), so you know which one to use. | "" |
| `AGENT_ENCRYPTION_PRIVATE_KEY` | The private key (assymetric/RSA) to decrypt and sign requests send over MQTT. | "" |
| `AGENT_ENCRYPTION_SYMMETRIC_KEY` | The symmetric key (AES) to encrypt and decrypt requests sent over MQTT. | "" |
| `LOG_LEVEL` | Level for logging, could be "info", "warning", "debug", "error" or "fatal". | "info" |
| `LOG_OUTPUT` | Logging output format "json" or "text". | "text" |
| `AGENT_MODE` | You can choose to run this in 'release' for production, and or 'demo' for showcasing. | "release" |
| `AGENT_TLS_INSECURE` | Specify if you want to use `InsecureSkipVerify` for the internal HTTP client. | "false" |
| `AGENT_USERNAME` | The username used to authenticate against the Kerberos Agent login page. | "root" |
| `AGENT_PASSWORD` | The password used to authenticate against the Kerberos Agent login page. | "root" |
| `AGENT_KEY` | A unique identifier for your Kerberos Agent, this is auto-generated but can be overriden. | "" |
| `AGENT_NAME` | The agent friendly-name. | "agent" |
| `AGENT_TIMEZONE` | Timezone which is used for converting time. | "Africa/Ceuta" |
| `AGENT_REMOVE_AFTER_UPLOAD` | When enabled, recordings uploaded successfully to a storage will be removed from disk. | "true" |
| `AGENT_OFFLINE` | Makes sure no external connection is made. | "false" |
| `AGENT_AUTO_CLEAN` | Cleans up the recordings directory. | "true" |
| `AGENT_AUTO_CLEAN_MAX_SIZE` | If `AUTO_CLEAN` enabled, set the max size of the recordings directory (in MB). | "100" |
| `AGENT_TIME` | Enable the timetable for Kerberos Agent | "false" |
| `AGENT_TIMETABLE` | A (weekly) time table to specify when to make recordings "start1,end1,start2,end2;start1.. | "" |
| `AGENT_REGION_POLYGON` | A single polygon set for motion detection: "x1,y1;x2,y2;x3,y3;... | "" |
| `AGENT_CAPTURE_IPCAMERA_RTSP` | Full-HD RTSP endpoint to the camera you're targetting. | "" |
| `AGENT_CAPTURE_IPCAMERA_SUB_RTSP` | Sub-stream RTSP endpoint used for livestreaming (WebRTC). | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF` | Mark as a compliant ONVIF device. | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF_XADDR` | ONVIF endpoint/address running on the camera. | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF_USERNAME` | ONVIF username to authenticate against. | "" |
| `AGENT_CAPTURE_IPCAMERA_ONVIF_PASSWORD` | ONVIF password to authenticate against. | "" |
| `AGENT_CAPTURE_MOTION` | Toggle for enabling or disabling motion. | "true" |
| `AGENT_CAPTURE_LIVEVIEW` | Toggle for enabling or disabling liveview. | "true" |
| `AGENT_CAPTURE_SNAPSHOTS` | Toggle for enabling or disabling snapshot generation. | "true" |
| `AGENT_CAPTURE_RECORDING` | Toggle for enabling making recordings. | "true" |
| `AGENT_CAPTURE_CONTINUOUS` | Toggle for enabling continuous "true" or motion "false". | "false" |
| `AGENT_CAPTURE_PRERECORDING` | If `CONTINUOUS` set to `false`, specify the recording time (seconds) before/after motion event. | "10" |
| `AGENT_CAPTURE_POSTRECORDING` | If `CONTINUOUS` set to `false`, specify the recording time (seconds) after motion event. | "20" |
| `AGENT_CAPTURE_MAXLENGTH` | The maximum length of a single recording (seconds). | "30" |
| `AGENT_CAPTURE_PIXEL_CHANGE` | If `CONTINUOUS` set to `false`, the number of pixel require to change before motion triggers. | "150" |
| `AGENT_CAPTURE_FRAGMENTED` | Set the format of the recorded MP4 to fragmented (suitable for HLS). | "false" |
| `AGENT_CAPTURE_FRAGMENTED_DURATION` | If `AGENT_CAPTURE_FRAGMENTED` set to `true`, define the duration (seconds) of a fragment. | "8" |
| `AGENT_MQTT_URI` | An MQTT broker endpoint that is used for bi-directional communication (live view, onvif, etc) | "tcp://mqtt.kerberos.io:1883" |
| `AGENT_MQTT_USERNAME` | Username of the MQTT broker. | "" |
| `AGENT_MQTT_PASSWORD` | Password of the MQTT broker. | "" |
| `AGENT_REALTIME_PROCESSING` | If `AGENT_REALTIME_PROCESSING` set to `true`, the agent will send key frames to the topic | "" |
| `AGENT_REALTIME_PROCESSING_TOPIC` | The topic to which keyframes will be sent in base64 encoded format. | "" |
| `AGENT_STUN_URI` | When using WebRTC, you'll need to provide a STUN server. | "stun:turn.kerberos.io:8443" |
| `AGENT_FORCE_TURN` | Force using a TURN server, by generating relay candidates only. | "false" |
| `AGENT_TURN_URI` | When using WebRTC, you'll need to provide a TURN server. | "turn:turn.kerberos.io:8443" |
| `AGENT_TURN_USERNAME` | TURN username used for WebRTC. | "username1" |
| `AGENT_TURN_PASSWORD` | TURN password used for WebRTC. | "password1" |
| `AGENT_CLOUD` | Store recordings in Kerberos Hub (s3), Kerberos Vault (kstorage), or Dropbox (dropbox). | "s3" |
| `AGENT_HUB_ENCRYPTION` | Turning on/off encryption of traffic from your Kerberos Agent to Kerberos Hub. | "true" |
| `AGENT_HUB_URI` | The Kerberos Hub API, defaults to our Kerberos Hub SAAS. | "https://api.hub.domain.com" |
| `AGENT_HUB_KEY` | The access key linked to your account in Kerberos Hub. | "" |
| `AGENT_HUB_PRIVATE_KEY` | The secret access key linked to your account in Kerberos Hub. | "" |
| `AGENT_HUB_REGION` | The Kerberos Hub region, to which you want to upload. | "" |
| `AGENT_HUB_SITE` | The site ID of a site you've created in your Kerberos Hub account. | "" |
| `AGENT_KERBEROSVAULT_URI` | The Kerberos Vault API url. | "https://vault.domain.com/api" |
| `AGENT_KERBEROSVAULT_ACCESS_KEY` | The access key of a Kerberos Vault account. | "" |
| `AGENT_KERBEROSVAULT_SECRET_KEY` | The secret key of a Kerberos Vault account. | "" |
| `AGENT_KERBEROSVAULT_PROVIDER` | A Kerberos Vault provider you have created (optional). | "" |
| `AGENT_KERBEROSVAULT_DIRECTORY` | The directory, in the Kerberos vault, where the recordings will be stored. | "" |
| `AGENT_KERBEROSVAULT_SECONDARY_URI` | The Kerberos Vault API url. | "https://vault.domain.com/api" |
| `AGENT_KERBEROSVAULT_SECONDARY_ACCESS_KEY` | The access key of a secondary Kerberos Vault account. | "" |
| `AGENT_KERBEROSVAULT_SECONDARY_SECRET_KEY` | The secret key of a secondary Kerberos Vault account. | "" |
| `AGENT_KERBEROSVAULT_SECONDARY_PROVIDER` | A secondary Kerberos Vault provider you have created (optional). | "" |
| `AGENT_KERBEROSVAULT_SECONDARY_DIRECTORY` | The directory, in the secondary Kerberos vault, where the recordings will be stored. | "" |
| `AGENT_DROPBOX_ACCESS_TOKEN` | The Access Token from your Dropbox app, that is used to leverage the Dropbox SDK. | "" |
| `AGENT_DROPBOX_DIRECTORY` | The directory, in Dropbox, where the recordings will be stored. | "" |
| `AGENT_ENCRYPTION` | Enable 'true' or disable 'false' end-to-end encryption for MQTT messages. | "false" |
| `AGENT_ENCRYPTION_RECORDINGS` | Enable 'true' or disable 'false' end-to-end encryption for recordings. | "false" |
| `AGENT_ENCRYPTION_FINGERPRINT` | The fingerprint of the keypair (public/private keys), so you know which one to use. | "" |
| `AGENT_ENCRYPTION_PRIVATE_KEY` | The private key (assymetric/RSA) to decrypt and sign requests send over MQTT. | "" |
| `AGENT_ENCRYPTION_SYMMETRIC_KEY` | The symmetric key (AES) to encrypt and decrypt requests sent over MQTT. | "" |
| `AGENT_SIGNING` | Enable 'true' or disable 'false' for signing recordings. | "true" |
| `AGENT_SIGNING_PRIVATE_KEY` | The private key (RSA) to sign the recordings fingerprint to validate origin. | "" - uses default one if empty |
## Encryption

View File

@@ -1,10 +0,0 @@
export version=0.0.1
export name=agent
docker build -t $name .
docker tag $name kerberos/$name:$version
docker push kerberos/$name:$version
docker tag $name kerberos/$name:latest
docker push kerberos/$name:latest

View File

@@ -41,7 +41,7 @@ You attach a volume to your container by leveraging the `-v` option. To mount yo
### Override with environment variables
Next to attaching the configuration file, it is also possible to override the configuration with environment variables. This makes deployments easier when leveraging `docker compose` or `kubernetes` deployments much easier and scalable. Using this approach we simplify automation through `ansible` and `terraform`. You'll find [the full list of environment variables on the main README.md file](https://github.com/kerberos-io/agent#override-with-environment-variables).
Next to attaching the configuration file, it is also possible to override the configuration with environment variables. This makes deployments when leveraging `docker compose` or `kubernetes` much easier and more scalable. Using this approach we simplify automation through `ansible` and `terraform`. You'll find [the full list of environment variables on the main README.md file](https://github.com/kerberos-io/agent#override-with-environment-variables).
### 2. Running multiple containers with Docker compose

BIN
machinery/.DS_Store vendored Normal file

Binary file not shown.

31
machinery/.env Normal file
View File

@@ -0,0 +1,31 @@
AGENT_NAME=camera-name
AGENT_KEY=uniq-camera-id
AGENT_TIMEZONE=Europe/Brussels
#AGENT_CAPTURE_CONTINUOUS=true
#AGENT_CAPTURE_IPCAMERA_RTSP=rtsp://fake.kerberos.io/stream
#AGENT_CAPTURE_IPCAMERA_SUB_RTSP=rtsp://fake.kerberos.io/stream
AGENT_CAPTURE_IPCAMERA_ONVIF_XADDR=x.x.x.x
AGENT_CAPTURE_IPCAMERA_ONVIF_USERNAME=xxx
AGENT_CAPTURE_IPCAMERA_ONVIF_PASSWORD=xxx
AGENT_HUB_URI=https://api.cloud.kerberos.io
AGENT_HUB_KEY=AKIXxxx4JBEI
AGENT_HUB_PRIVATE_KEY=DIOXxxxAlYpaxxxxXioL0txxx
AGENT_HUB_SITE=681xxxxxxx9bcda5
# By default will send to Hub (=S3), if you wish to send to Kerberos Vault, set to "kstorage"
AGENT_CLOUD=s3
AGENT_KERBEROSVAULT_URI=
AGENT_KERBEROSVAULT_PROVIDER=
AGENT_KERBEROSVAULT_DIRECTORY=
AGENT_KERBEROSVAULT_ACCESS_KEY=
AGENT_KERBEROSVAULT_SECRET_KEY=
AGENT_KERBEROSVAULT_MAX_RETRIES=10
AGENT_KERBEROSVAULT_TIMEOUT=120
AGENT_KERBEROSVAULT_SECONDARY_URI=
AGENT_KERBEROSVAULT_SECONDARY_PROVIDER=
AGENT_KERBEROSVAULT_SECONDARY_DIRECTORY=
AGENT_KERBEROSVAULT_SECONDARY_ACCESS_KEY=
AGENT_KERBEROSVAULT_SECONDARY_SECRET_KEY=
# Open telemetry tracing endpoint
OTEL_EXPORTER_OTLP_ENDPOINT=

View File

@@ -98,6 +98,7 @@
"region": "eu-west-1"
},
"kstorage": {},
"kstorage_secondary": {},
"dropbox": {},
"mqtturi": "tcp://mqtt.kerberos.io:1883",
"mqtt_username": "",
@@ -115,6 +116,7 @@
"hub_site": "",
"condition_uri": "",
"encryption": {},
"signing": {},
"realtimeprocessing": "false",
"realtimeprocessing_topic": ""
}
}

View File

@@ -1,147 +1,113 @@
module github.com/kerberos-io/agent/machinery
go 1.23.1
go 1.24.2
replace google.golang.org/genproto => google.golang.org/genproto v0.0.0-20250519155744-55703ea1f237
require (
github.com/Eyevinn/mp4ff v0.48.0
github.com/InVisionApp/conjungo v1.1.0
github.com/appleboy/gin-jwt/v2 v2.10.1
github.com/bluenviron/gortsplib/v4 v4.12.1
github.com/bluenviron/mediacommon v1.13.3
github.com/appleboy/gin-jwt/v2 v2.10.3
github.com/bluenviron/gortsplib/v4 v4.14.1
github.com/bluenviron/mediacommon v1.14.0
github.com/cedricve/go-onvif v0.0.0-20200222191200-567e8ce298f6
github.com/dromara/carbon/v2 v2.5.2
github.com/dromara/carbon/v2 v2.6.8
github.com/dropbox/dropbox-sdk-go-unofficial/v6 v6.0.5
github.com/eclipse/paho.mqtt.golang v1.5.0
github.com/elastic/go-sysinfo v1.15.0
github.com/gin-contrib/cors v1.7.3
github.com/gin-contrib/pprof v1.5.2
github.com/gin-gonic/contrib v0.0.0-20241229022435-d12709533de6
github.com/gin-gonic/gin v1.10.0
github.com/elastic/go-sysinfo v1.15.3
github.com/gin-contrib/cors v1.7.5
github.com/gin-contrib/pprof v1.5.3
github.com/gin-gonic/contrib v0.0.0-20250521004450-2b1292699c15
github.com/gin-gonic/gin v1.10.1
github.com/gofrs/uuid v4.4.0+incompatible
github.com/golang-jwt/jwt/v4 v4.5.1
github.com/golang-jwt/jwt/v4 v4.5.2
github.com/gorilla/websocket v1.5.3
github.com/kellydunn/golang-geo v0.7.0
github.com/kerberos-io/joy4 v1.0.64
github.com/kerberos-io/onvif v1.0.0
github.com/minio/minio-go/v6 v6.0.57
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
github.com/pion/rtp v1.8.10
github.com/pion/webrtc/v4 v4.0.7
github.com/pion/interceptor v0.1.40
github.com/pion/rtp v1.8.19
github.com/pion/webrtc/v4 v4.1.2
github.com/sirupsen/logrus v1.9.3
github.com/swaggo/files v1.0.1
github.com/swaggo/gin-swagger v1.6.0
github.com/swaggo/swag v1.16.4
github.com/tevino/abool v1.2.0
github.com/yapingcat/gomedia v0.0.0-20240906162731-17feea57090c
github.com/zaf/g711 v1.4.0
go.mongodb.org/mongo-driver v1.17.1
gopkg.in/DataDog/dd-trace-go.v1 v1.70.1
go.mongodb.org/mongo-driver v1.17.3
go.opentelemetry.io/otel v1.36.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.36.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0
go.opentelemetry.io/otel/sdk v1.36.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1
)
require (
github.com/DataDog/appsec-internal-go v1.9.0 // indirect
github.com/DataDog/datadog-agent/pkg/obfuscate v0.58.0 // indirect
github.com/DataDog/datadog-agent/pkg/proto v0.58.0 // indirect
github.com/DataDog/datadog-agent/pkg/remoteconfig/state v0.58.0 // indirect
github.com/DataDog/datadog-agent/pkg/trace v0.58.0 // indirect
github.com/DataDog/datadog-agent/pkg/util/log v0.58.0 // indirect
github.com/DataDog/datadog-agent/pkg/util/scrubber v0.58.0 // indirect
github.com/DataDog/datadog-go/v5 v5.5.0 // indirect
github.com/DataDog/go-libddwaf/v3 v3.5.1 // indirect
github.com/DataDog/go-runtime-metrics-internal v0.0.0-20241106155157-194426bbbd59 // indirect
github.com/DataDog/go-sqllexer v0.0.14 // indirect
github.com/DataDog/go-tuf v1.1.0-0.5.2 // indirect
github.com/DataDog/gostackparse v0.7.0 // indirect
github.com/DataDog/opentelemetry-mapping-go/pkg/otlp/attributes v0.20.0 // indirect
github.com/DataDog/sketches-go v1.4.5 // indirect
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/PuerkitoBio/purell v1.1.1 // indirect
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
github.com/beevik/etree v1.2.0 // indirect
github.com/bytedance/sonic v1.12.6 // indirect
github.com/bytedance/sonic/loader v0.2.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cihub/seelog v0.0.0-20170130134532-f561c5e57575 // indirect
github.com/bluenviron/mediacommon/v2 v2.2.0 // indirect
github.com/bytedance/sonic v1.13.2 // indirect
github.com/bytedance/sonic/loader v0.2.4 // indirect
github.com/cenkalti/backoff/v5 v5.0.2 // indirect
github.com/clbanning/mxj v1.8.4 // indirect
github.com/clbanning/mxj/v2 v2.7.0 // indirect
github.com/cloudwego/base64x v0.1.4 // indirect
github.com/cloudwego/iasm v0.2.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/eapache/queue/v2 v2.0.0-20230407133247-75960ed334e4 // indirect
github.com/ebitengine/purego v0.6.0-alpha.5 // indirect
github.com/elastic/go-windows v1.0.0 // indirect
github.com/cloudwego/base64x v0.1.5 // indirect
github.com/elastic/go-windows v1.0.2 // indirect
github.com/elgs/gostrgen v0.0.0-20161222160715-9d61ae07eeae // indirect
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 // indirect
github.com/gabriel-vasile/mimetype v1.4.7 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/gin-contrib/sse v1.0.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
github.com/go-openapi/jsonreference v0.19.6 // indirect
github.com/go-openapi/spec v0.20.4 // indirect
github.com/go-openapi/swag v0.19.15 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.23.0 // indirect
github.com/goccy/go-json v0.10.4 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/go-playground/validator/v10 v10.26.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7 // indirect
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 // indirect
github.com/hashicorp/go-sockaddr v1.0.2 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect
github.com/icholy/digest v0.1.23 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/juju/errors v1.0.0 // indirect
github.com/klauspost/compress v1.17.1 // indirect
github.com/klauspost/compress v1.16.7 // indirect
github.com/klauspost/cpuid v1.2.3 // indirect
github.com/klauspost/cpuid/v2 v2.2.9 // indirect
github.com/klauspost/cpuid/v2 v2.2.10 // indirect
github.com/kylelemons/go-gypsy v1.0.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/lib/pq v1.10.2 // indirect
github.com/lufia/plan9stats v0.0.0-20220913051719-115f729f3c8c // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/lib/pq v1.10.9 // indirect
github.com/mailru/easyjson v0.7.6 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/minio/md5-simd v1.1.0 // indirect
github.com/minio/sha256-simd v0.1.1 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/nxadm/tail v1.4.11 // indirect
github.com/outcaste-io/ristretto v0.2.3 // indirect
github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/philhofer/fwd v1.1.3-0.20240612014219-fbbf4953d986 // indirect
github.com/pion/datachannel v1.5.10 // indirect
github.com/pion/dtls/v3 v3.0.4 // indirect
github.com/pion/ice/v4 v4.0.3 // indirect
github.com/pion/interceptor v0.1.37 // indirect
github.com/pion/logging v0.2.2 // indirect
github.com/pion/dtls/v3 v3.0.6 // indirect
github.com/pion/ice/v4 v4.0.10 // indirect
github.com/pion/logging v0.2.3 // indirect
github.com/pion/mdns/v2 v2.0.7 // indirect
github.com/pion/randutil v0.1.0 // indirect
github.com/pion/rtcp v1.2.15 // indirect
github.com/pion/sctp v1.8.35 // indirect
github.com/pion/sdp/v3 v3.0.9 // indirect
github.com/pion/srtp/v3 v3.0.4 // indirect
github.com/pion/sctp v1.8.39 // indirect
github.com/pion/sdp/v3 v3.0.13 // indirect
github.com/pion/srtp/v3 v3.0.5 // indirect
github.com/pion/stun/v3 v3.0.0 // indirect
github.com/pion/transport/v3 v3.0.7 // indirect
github.com/pion/turn/v4 v4.0.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c // indirect
github.com/prometheus/procfs v0.15.1 // indirect
github.com/richardartoul/molecule v1.0.1-0.20240531184615-7ca0df43c0b3 // indirect
github.com/ryanuber/go-glob v1.0.0 // indirect
github.com/secure-systems-lab/go-securesystemslib v0.7.0 // indirect
github.com/shirou/gopsutil/v3 v3.24.4 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/spaolacci/murmur3 v1.1.0 // indirect
github.com/tinylib/msgp v1.2.1 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.12 // indirect
github.com/wlynxg/anet v0.0.5 // indirect
@@ -149,34 +115,24 @@ require (
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/ziutek/mymysql v1.5.4 // indirect
go.opentelemetry.io/collector/component v0.104.0 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.104.0 // indirect
go.opentelemetry.io/collector/pdata v1.11.0 // indirect
go.opentelemetry.io/collector/semconv v0.104.0 // indirect
go.opentelemetry.io/otel v1.27.0 // indirect
go.opentelemetry.io/otel/metric v1.27.0 // indirect
go.opentelemetry.io/otel/trace v1.27.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
go.uber.org/zap v1.27.0 // indirect
golang.org/x/arch v0.12.0 // indirect
golang.org/x/crypto v0.31.0 // indirect
golang.org/x/mod v0.20.0 // indirect
golang.org/x/net v0.33.0 // indirect
golang.org/x/oauth2 v0.18.0 // indirect
golang.org/x/sync v0.10.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect
golang.org/x/time v0.6.0 // indirect
golang.org/x/tools v0.24.0 // indirect
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240520151616-dc85e6b867a5 // indirect
google.golang.org/grpc v1.64.0 // indirect
google.golang.org/protobuf v1.36.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/otel/metric v1.36.0 // indirect
go.opentelemetry.io/otel/trace v1.36.0 // indirect
go.opentelemetry.io/proto/otlp v1.6.0 // indirect
golang.org/x/arch v0.16.0 // indirect
golang.org/x/crypto v0.38.0 // indirect
golang.org/x/net v0.40.0 // indirect
golang.org/x/oauth2 v0.30.0 // indirect
golang.org/x/sync v0.14.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.25.0 // indirect
golang.org/x/tools v0.30.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250519155744-55703ea1f237 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250519155744-55703ea1f237 // indirect
google.golang.org/grpc v1.72.1 // indirect
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/ini.v1 v1.42.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
howett.net/plist v0.0.0-20181124034731-591f970eefbb // indirect

File diff suppressed because it is too large Load Diff

View File

@@ -3,6 +3,7 @@ package main
import (
"context"
"flag"
"fmt"
"os"
"time"
@@ -11,48 +12,62 @@ import (
"github.com/kerberos-io/agent/machinery/src/log"
"github.com/kerberos-io/agent/machinery/src/models"
"github.com/kerberos-io/agent/machinery/src/onvif"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/exporters/otlp/otlptrace"
"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp"
"go.opentelemetry.io/otel/sdk/resource"
"go.opentelemetry.io/otel/sdk/trace"
semconv "go.opentelemetry.io/otel/semconv/v1.4.0"
configService "github.com/kerberos-io/agent/machinery/src/config"
"github.com/kerberos-io/agent/machinery/src/routers"
"github.com/kerberos-io/agent/machinery/src/utils"
"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer"
"gopkg.in/DataDog/dd-trace-go.v1/profiler"
)
var VERSION = utils.VERSION
func main() {
// You might be interested in debugging the agent.
if os.Getenv("DATADOG_AGENT_ENABLED") == "true" {
if os.Getenv("DATADOG_AGENT_K8S_ENABLED") == "true" {
tracer.Start()
defer tracer.Stop()
} else {
service := os.Getenv("DATADOG_AGENT_SERVICE")
environment := os.Getenv("DATADOG_AGENT_ENVIRONMENT")
log.Log.Info("Starting Datadog Agent with service: " + service + " and environment: " + environment)
rules := []tracer.SamplingRule{tracer.RateRule(1)}
tracer.Start(
tracer.WithSamplingRules(rules),
tracer.WithService(service),
tracer.WithEnv(environment),
)
defer tracer.Stop()
err := profiler.Start(
profiler.WithService(service),
profiler.WithEnv(environment),
profiler.WithProfileTypes(
profiler.CPUProfile,
profiler.HeapProfile,
),
)
if err != nil {
log.Log.Fatal(err.Error())
}
defer profiler.Stop()
}
func startTracing(agentKey string, otelEndpoint string) (*trace.TracerProvider, error) {
serviceName := "agent-" + agentKey
headers := map[string]string{
"content-type": "application/json",
}
exporter, err := otlptrace.New(
context.Background(),
otlptracehttp.NewClient(
otlptracehttp.WithEndpoint(otelEndpoint),
otlptracehttp.WithHeaders(headers),
otlptracehttp.WithInsecure(),
),
)
if err != nil {
return nil, fmt.Errorf("creating new exporter: %w", err)
}
tracerprovider := trace.NewTracerProvider(
trace.WithBatcher(
exporter,
trace.WithMaxExportBatchSize(trace.DefaultMaxExportBatchSize),
trace.WithBatchTimeout(trace.DefaultScheduleDelay*time.Millisecond),
trace.WithMaxExportBatchSize(trace.DefaultMaxExportBatchSize),
),
trace.WithResource(
resource.NewWithAttributes(
semconv.SchemaURL,
semconv.ServiceNameKey.String(serviceName),
attribute.String("environment", "develop"),
),
),
)
otel.SetTracerProvider(tracerprovider)
return tracerprovider, nil
}
func main() {
// Start the show ;)
// We'll parse the flags (named variables), and start the agent.
@@ -114,7 +129,7 @@ func main() {
case "run":
{
// Print Kerberos.io ASCII art
// Print Agent ASCII art
utils.PrintASCIIArt()
// Print the environment variables which include "AGENT_" as prefix.
@@ -127,12 +142,29 @@ func main() {
configuration.Name = name
configuration.Port = port
// Open this configuration either from Kerberos Agent or Kerberos Factory.
// Open this configuration either from Agent or Factory.
configService.OpenConfig(configDirectory, &configuration)
// We will override the configuration with the environment variables
configService.OverrideWithEnvironmentVariables(&configuration)
// Start OpenTelemetry tracing
if otelEndpoint := os.Getenv("OTEL_EXPORTER_OTLP_ENDPOINT"); otelEndpoint == "" {
log.Log.Info("main.Main(): No OpenTelemetry endpoint provided, skipping tracing")
} else {
log.Log.Info("main.Main(): Starting OpenTelemetry tracing with endpoint: " + otelEndpoint)
agentKey := configuration.Config.Key
traceProvider, err := startTracing(agentKey, otelEndpoint)
if err != nil {
log.Log.Error("traceprovider: " + err.Error())
}
defer func() {
if err := traceProvider.Shutdown(context.Background()); err != nil {
log.Log.Error("traceprovider: " + err.Error())
}
}()
}
// Printing final configuration
utils.PrintConfiguration(&configuration)
@@ -175,7 +207,7 @@ func main() {
HandleBootstrap: make(chan string, 1),
}
go components.Bootstrap(configDirectory, &configuration, &communication, &capture)
go components.Bootstrap(ctx, configDirectory, &configuration, &communication, &capture)
// Start the REST API.
routers.StartWebserver(configDirectory, &configuration, &communication, &capture)

View File

@@ -38,16 +38,16 @@ func (c *Capture) SetBackChannelClient(rtspUrl string) *Golibrtsp {
// RTSPClient is a interface that abstracts the RTSP client implementation.
type RTSPClient interface {
// Connect to the RTSP server.
Connect(ctx context.Context) error
Connect(ctx context.Context, otelContext context.Context) error
// Connect to a backchannel RTSP server.
ConnectBackChannel(ctx context.Context) error
ConnectBackChannel(ctx context.Context, otelContext context.Context) error
// Start the RTSP client, and start reading packets.
Start(ctx context.Context, streamType string, queue *packets.Queue, configuration *models.Configuration, communication *models.Communication) error
// Start the RTSP client, and start reading packets.
StartBackChannel(ctx context.Context) (err error)
StartBackChannel(ctx context.Context, otelContext context.Context) error
// Decode a packet into a image.
DecodePacket(pkt packets.Packet) (image.YCbCr, error)
@@ -59,7 +59,7 @@ type RTSPClient interface {
WritePacket(pkt packets.Packet) error
// Close the connection to the RTSP server.
Close() error
Close(ctx context.Context) error
// Get a list of streams from the RTSP server.
GetStreams() ([]packets.Stream, error)

View File

@@ -33,8 +33,11 @@ import (
"github.com/kerberos-io/agent/machinery/src/models"
"github.com/kerberos-io/agent/machinery/src/packets"
"github.com/pion/rtp"
"go.opentelemetry.io/otel"
)
var tracer = otel.Tracer("github.com/kerberos-io/agent/machinery/src/capture")
// Implements the RTSPClient interface.
type Golibrtsp struct {
RTSPClient
@@ -81,6 +84,21 @@ type Golibrtsp struct {
AudioMPEG4Decoder *rtpmpeg4audio.Decoder
Streams []packets.Stream
// FPS calculation fields
lastFrameTime time.Time
frameTimeBuffer []time.Duration
frameBufferSize int
frameBufferIndex int
fpsMutex sync.Mutex
// I-frame interval tracking fields
packetsSinceLastKeyframe int
lastKeyframePacketCount int
keyframeIntervals []int
keyframeBufferSize int
keyframeBufferIndex int
keyframeMutex sync.Mutex
}
// Init function
@@ -103,7 +121,10 @@ func init() {
}
// Connect to the RTSP server.
func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
func (g *Golibrtsp) Connect(ctx context.Context, ctxOtel context.Context) (err error) {
_, span := tracer.Start(ctxOtel, "Connect")
defer span.End()
transport := gortsplib.TransportTCP
g.Client = gortsplib.Client{
@@ -131,8 +152,9 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
return
}
// Iniatlise the mutex.
// Initialize the mutex and FPS calculation.
g.VideoDecoderMutex = &sync.Mutex{}
g.initFPSCalculation()
// find the H264 media and format
var formaH264 *format.H264
@@ -156,7 +178,9 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
// but try to fetch it later on.
if errSPS != nil {
log.Log.Debug("capture.golibrtsp.Connect(H264): " + errSPS.Error())
streamIndex := len(g.Streams)
g.Streams = append(g.Streams, packets.Stream{
Index: streamIndex,
Name: formaH264.Codec(),
IsVideo: true,
IsAudio: false,
@@ -168,7 +192,9 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
IsBackChannel: false,
})
} else {
streamIndex := len(g.Streams)
g.Streams = append(g.Streams, packets.Stream{
Index: streamIndex,
Name: formaH264.Codec(),
IsVideo: true,
IsAudio: false,
@@ -216,8 +242,9 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
log.Log.Info("capture.golibrtsp.Connect(H265): " + err.Error())
return
}
streamIndex := len(g.Streams)
g.Streams = append(g.Streams, packets.Stream{
Index: streamIndex,
Name: formaH265.Codec(),
IsVideo: true,
IsAudio: false,
@@ -265,8 +292,9 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
log.Log.Error("capture.golibrtsp.Connect(G711): " + err.Error())
} else {
g.AudioG711Decoder = audiortpDec
streamIndex := len(g.Streams)
g.Streams = append(g.Streams, packets.Stream{
Index: streamIndex,
Name: "PCM_MULAW",
IsVideo: false,
IsAudio: true,
@@ -300,8 +328,9 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
log.Log.Error("capture.golibrtsp.Connect(Opus): " + err.Error())
} else {
g.AudioOpusDecoder = audiortpDec
streamIndex := len(g.Streams)
g.Streams = append(g.Streams, packets.Stream{
Index: streamIndex,
Name: "OPUS",
IsVideo: false,
IsAudio: true,
@@ -328,11 +357,15 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
// Something went wrong .. Do something
log.Log.Error("capture.golibrtsp.Connect(MPEG4): " + err.Error())
} else {
streamIndex := len(g.Streams)
g.Streams = append(g.Streams, packets.Stream{
Index: streamIndex,
Name: "AAC",
IsVideo: false,
IsAudio: true,
IsBackChannel: false,
SampleRate: audioFormaMPEG4.Config.SampleRate,
Channels: audioFormaMPEG4.Config.ChannelCount,
})
// Set the index for the audio
@@ -352,7 +385,11 @@ func (g *Golibrtsp) Connect(ctx context.Context) (err error) {
return
}
func (g *Golibrtsp) ConnectBackChannel(ctx context.Context) (err error) {
func (g *Golibrtsp) ConnectBackChannel(ctx context.Context, ctxRunAgent context.Context) (err error) {
_, span := tracer.Start(ctxRunAgent, "ConnectBackChannel")
defer span.End()
// Transport TCP
transport := gortsplib.TransportTCP
g.Client = gortsplib.Client{
@@ -397,7 +434,9 @@ func (g *Golibrtsp) ConnectBackChannel(ctx context.Context) (err error) {
g.HasBackChannel = false
} else {
g.HasBackChannel = true
streamIndex := len(g.Streams)
g.Streams = append(g.Streams, packets.Stream{
Index: streamIndex,
Name: "PCM_MULAW",
IsVideo: false,
IsAudio: true,
@@ -551,6 +590,9 @@ func (g *Golibrtsp) Start(ctx context.Context, streamType string, queue *packets
var sps h264.SPS
errSPS := sps.Unmarshal(nalu)
if errSPS == nil {
// Debug SPS information
g.debugSPSInfo(&sps, streamType)
// Get width
g.Streams[g.VideoH264Index].Width = sps.Width()
if streamType == "main" {
@@ -565,12 +607,14 @@ func (g *Golibrtsp) Start(ctx context.Context, streamType string, queue *packets
} else if streamType == "sub" {
configuration.Config.Capture.IPCamera.SubHeight = sps.Height()
}
// Get FPS
g.Streams[g.VideoH264Index].FPS = sps.FPS()
// Get FPS using enhanced method
fps := g.getEnhancedFPS(&sps, g.VideoH264Index)
g.Streams[g.VideoH264Index].FPS = fps
log.Log.Debug(fmt.Sprintf("capture.golibrtsp.Start(%s): Final FPS=%.2f", streamType, fps))
g.VideoH264Forma.SPS = nalu
}
case h264.NALUTypePPS:
// Read out pps
g.VideoH264Forma.PPS = nalu
}
filteredAU = append(filteredAU, nalu)
@@ -607,6 +651,21 @@ func (g *Golibrtsp) Start(ctx context.Context, streamType string, queue *packets
Codec: "H264",
}
// Track keyframe intervals
keyframeInterval := g.trackKeyframeInterval(idrPresent)
if idrPresent && keyframeInterval > 0 {
avgInterval := g.getAverageKeyframeInterval()
gopDuration := float64(keyframeInterval) / g.Streams[g.VideoH265Index].FPS
gopSize := int(avgInterval) // Store GOP size in a separate variable
g.Streams[g.VideoH264Index].GopSize = gopSize
log.Log.Info(fmt.Sprintf("capture.golibrtsp.Start(%s): Keyframe interval=%d packets, Avg=%.1f, GOP=%.1fs, GOPSize=%d",
streamType, keyframeInterval, avgInterval, gopDuration, gopSize))
preRecording := configuration.Config.Capture.PreRecording
if preRecording > 0 && int(gopDuration) > 0 {
queue.SetMaxGopCount(int(preRecording)/int(gopDuration) + 1)
}
}
pkt.Data = pkt.Data[4:]
if pkt.IsKeyFrame {
annexbNALUStartCode := func() []byte { return []byte{0x00, 0x00, 0x00, 0x01} }
@@ -736,6 +795,21 @@ func (g *Golibrtsp) Start(ctx context.Context, streamType string, queue *packets
Codec: "H265",
}
// Track keyframe intervals for H265
keyframeInterval := g.trackKeyframeInterval(isRandomAccess)
if isRandomAccess && keyframeInterval > 0 {
avgInterval := g.getAverageKeyframeInterval()
gopDuration := float64(keyframeInterval) / g.Streams[g.VideoH265Index].FPS
gopSize := int(avgInterval) // Store GOP size in a separate variable
g.Streams[g.VideoH265Index].GopSize = gopSize
log.Log.Info(fmt.Sprintf("capture.golibrtsp.Start(%s): Keyframe interval=%d packets, Avg=%.1f, GOP=%.1fs, GOPSize=%d",
streamType, keyframeInterval, avgInterval, gopDuration, gopSize))
preRecording := configuration.Config.Capture.PreRecording
if preRecording > 0 && int(gopDuration) > 0 {
queue.SetMaxGopCount(int(preRecording)/int(gopDuration) + 1)
}
}
queue.WritePacket(pkt)
// This will check if we need to stop the thread,
@@ -778,7 +852,7 @@ func (g *Golibrtsp) Start(ctx context.Context, streamType string, queue *packets
}
// Start the RTSP client, and start reading packets.
func (g *Golibrtsp) StartBackChannel(ctx context.Context) (err error) {
func (g *Golibrtsp) StartBackChannel(ctx context.Context, ctxRunAgent context.Context) (err error) {
log.Log.Info("capture.golibrtsp.StartBackChannel(): started")
// Wait for a second, so we can be sure the stream is playing.
time.Sleep(1 * time.Second)
@@ -860,8 +934,8 @@ func (g *Golibrtsp) DecodePacketRaw(pkt packets.Packet) (image.Gray, error) {
}
// Get a list of streams from the RTSP server.
func (j *Golibrtsp) GetStreams() ([]packets.Stream, error) {
return j.Streams, nil
func (g *Golibrtsp) GetStreams() ([]packets.Stream, error) {
return g.Streams, nil
}
// Get a list of video streams from the RTSP server.
@@ -887,7 +961,11 @@ func (g *Golibrtsp) GetAudioStreams() ([]packets.Stream, error) {
}
// Close the connection to the RTSP server.
func (g *Golibrtsp) Close() error {
func (g *Golibrtsp) Close(ctxOtel context.Context) error {
_, span := tracer.Start(ctxOtel, "Close")
defer span.End()
// Close the demuxer.
g.Client.Close()
@@ -1089,7 +1167,7 @@ func WriteMPEG4Audio(forma *format.MPEG4Audio, aus [][]byte) ([]byte, error) {
pkts := make(mpeg4audio.ADTSPackets, len(aus))
for i, au := range aus {
pkts[i] = &mpeg4audio.ADTSPacket{
Type: forma.Config.Type,
Type: mpeg4audio.ObjectType(forma.Config.Type),
SampleRate: forma.Config.SampleRate,
ChannelCount: forma.Config.ChannelCount,
AU: au,
@@ -1101,3 +1179,185 @@ func WriteMPEG4Audio(forma *format.MPEG4Audio, aus [][]byte) ([]byte, error) {
}
return enc, nil
}
// initFPSCalculation prepares the ring buffers used for timestamp-based
// FPS estimation and for keyframe (GOP) interval tracking. Must be called
// once before calculateFPSFromTimestamps or trackKeyframeInterval.
func (g *Golibrtsp) initFPSCalculation() {
	// Ring buffer of inter-frame durations; 30 samples smooth the FPS estimate.
	g.frameBufferSize = 30
	g.frameTimeBuffer = make([]time.Duration, g.frameBufferSize)
	g.frameBufferIndex = 0
	g.lastFrameTime = time.Time{}

	// Ring buffer of packet counts between keyframes; 10 samples.
	g.keyframeBufferSize = 10
	g.keyframeIntervals = make([]int, g.keyframeBufferSize)
	g.keyframeBufferIndex = 0
	g.packetsSinceLastKeyframe = 0
	g.lastKeyframePacketCount = 0
}
// calculateFPSFromTimestamps estimates the frame rate from the arrival
// times of successive calls. Each invocation records the elapsed time
// since the previous one in a ring buffer and returns the average FPS
// over the stored samples, or 0 while not enough data is available.
func (g *Golibrtsp) calculateFPSFromTimestamps() float64 {
	g.fpsMutex.Lock()
	defer g.fpsMutex.Unlock()

	// First sample: remember the timestamp, nothing to average yet.
	if g.lastFrameTime.IsZero() {
		g.lastFrameTime = time.Now()
		return 0
	}

	current := time.Now()
	elapsed := current.Sub(g.lastFrameTime)
	g.lastFrameTime = current

	// Record the new interval in the ring buffer.
	g.frameTimeBuffer[g.frameBufferIndex] = elapsed
	g.frameBufferIndex = (g.frameBufferIndex + 1) % g.frameBufferSize

	// Average only the slots that have been filled so far.
	var sum time.Duration
	count := 0
	for _, sample := range g.frameTimeBuffer {
		if sample > 0 {
			sum += sample
			count++
		}
	}
	if count == 0 {
		return 0
	}
	mean := sum / time.Duration(count)
	if mean == 0 {
		return 0
	}
	return float64(time.Second) / float64(mean)
}
// getEnhancedFPS returns the stream frame rate, preferring the value
// encoded in the SPS and falling back to a timestamp-based estimate when
// the SPS value is missing or implausible (outside (0, 120]). A hard
// default of 25 fps is returned when neither source yields a usable value.
// NOTE(review): streamIndex is currently unused; kept for call-site stability.
func (g *Golibrtsp) getEnhancedFPS(sps *h264.SPS, streamIndex int8) float64 {
	const maxReasonableFPS = 120

	// Preferred source: frame rate signalled in the SPS.
	fromSPS := sps.FPS()
	if fromSPS > 0 && fromSPS <= maxReasonableFPS {
		log.Log.Debug(fmt.Sprintf("capture.golibrtsp.getEnhancedFPS(): SPS FPS: %.2f", fromSPS))
		return fromSPS
	}

	// Fallback: estimate from observed arrival timestamps.
	fromTimestamps := g.calculateFPSFromTimestamps()
	if fromTimestamps > 0 && fromTimestamps <= maxReasonableFPS {
		log.Log.Debug(fmt.Sprintf("capture.golibrtsp.getEnhancedFPS(): Timestamp FPS: %.2f", fromTimestamps))
		return fromTimestamps
	}

	// An implausible SPS value is still better than nothing.
	if fromSPS > 0 {
		return fromSPS
	}
	return 25.0 // last-resort default
}
// trackKeyframeInterval counts packets between consecutive keyframes.
// Call it once per packet; when isKeyframe is true it returns the packet
// count of the interval that just ended (recording it in the ring buffer,
// except for the very first keyframe), otherwise it returns 0.
func (g *Golibrtsp) trackKeyframeInterval(isKeyframe bool) int {
	g.keyframeMutex.Lock()
	defer g.keyframeMutex.Unlock()

	g.packetsSinceLastKeyframe++
	if !isKeyframe {
		return 0
	}

	// Skip recording until a full interval between two keyframes exists.
	if g.lastKeyframePacketCount > 0 {
		g.keyframeIntervals[g.keyframeBufferIndex] = g.packetsSinceLastKeyframe
		g.keyframeBufferIndex = (g.keyframeBufferIndex + 1) % g.keyframeBufferSize
	}

	// Close the current interval and start counting towards the next keyframe.
	g.lastKeyframePacketCount = g.packetsSinceLastKeyframe
	g.packetsSinceLastKeyframe = 0
	return g.lastKeyframePacketCount
}
// getAverageKeyframeInterval returns the mean number of packets between
// keyframes (the observed GOP size) over the recorded samples, or 0 when
// no interval has been measured yet.
func (g *Golibrtsp) getAverageKeyframeInterval() float64 {
	g.keyframeMutex.Lock()
	defer g.keyframeMutex.Unlock()

	sum := 0
	samples := 0
	for _, v := range g.keyframeIntervals {
		if v > 0 {
			sum += v
			samples++
		}
	}
	if samples == 0 {
		return 0
	}
	return float64(sum) / float64(samples)
}
// getGOPDuration returns the length of one GOP in seconds, derived from
// the average keyframe interval and the supplied frame rate. It returns
// 0 when either quantity is not (yet) known.
func (g *Golibrtsp) getGOPDuration(fps float64) float64 {
	interval := g.getAverageKeyframeInterval()
	if interval <= 0 || fps <= 0 {
		return 0
	}
	return interval / fps
}
// getSPSTimingInfo derives approximate H.264 VUI-style timing parameters
// from the SPS. The gortsplib SPS type does not expose the VUI fields
// directly, so the values are reconstructed from the computed FPS using
// the H.264 relation fps = time_scale / (2 * num_units_in_tick).
//
// Returns hasVUI=false (and zero timing values) when the SPS carries no
// usable frame-rate information.
func (g *Golibrtsp) getSPSTimingInfo(sps *h264.SPS) (hasVUI bool, timeScale uint32, numUnitsInTick uint32, fps float64) {
	fps = sps.FPS()
	if fps <= 0 {
		return false, 0, 0, fps
	}
	// The previous implementation special-cased 24/25/30 fps behind exact
	// float == comparisons, but those branches produced the same values as
	// this generic formula, so they were redundant (and fragile) and were
	// removed.
	hasVUI = true
	timeScale = uint32(fps * 2)
	numUnitsInTick = 1
	return hasVUI, timeScale, numUnitsInTick, fps
}
// debugSPSInfo logs the resolution and (reconstructed) timing parameters
// of the given SPS at debug level, tagged with the stream type.
func (g *Golibrtsp) debugSPSInfo(sps *h264.SPS, streamType string) {
	hasVUI, timeScale, numUnitsInTick, fps := g.getSPSTimingInfo(sps)

	log.Log.Debug(fmt.Sprintf("capture.golibrtsp.debugSPSInfo(%s): Width=%d, Height=%d",
		streamType, sps.Width(), sps.Height()))
	log.Log.Debug(fmt.Sprintf("capture.golibrtsp.debugSPSInfo(%s): HasVUI=%t, FPS=%.2f",
		streamType, hasVUI, fps))
	if !hasVUI {
		return
	}
	log.Log.Debug(fmt.Sprintf("capture.golibrtsp.debugSPSInfo(%s): TimeScale=%d, NumUnitsInTick=%d",
		streamType, timeScale, numUnitsInTick))
}

View File

@@ -16,7 +16,8 @@ import (
"github.com/kerberos-io/agent/machinery/src/models"
"github.com/kerberos-io/agent/machinery/src/packets"
"github.com/kerberos-io/agent/machinery/src/utils"
"github.com/yapingcat/gomedia/go-mp4"
"github.com/kerberos-io/agent/machinery/src/video"
"go.opentelemetry.io/otel/trace"
)
func CleanupRecordingDirectory(configDirectory string, configuration *models.Configuration) {
@@ -63,11 +64,12 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
} else {
log.Log.Debug("capture.main.HandleRecordStream(): started")
recordingPeriod := config.Capture.PostRecording // number of seconds to record.
maxRecordingPeriod := config.Capture.MaxLengthRecording // maximum number of seconds to record.
preRecording := config.Capture.PreRecording * 1000
postRecording := config.Capture.PostRecording * 1000 // number of seconds to record.
maxRecordingPeriod := config.Capture.MaxLengthRecording * 1000 // maximum number of seconds to record.
// Synchronise the last synced time
now := time.Now().Unix()
now := time.Now().UnixMilli()
startRecording := now
timestamp := now
@@ -75,14 +77,27 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
config.Name = config.FriendlyName
}
// For continuous and motion based recording we will use a single file.
var file *os.File
// Get the audio and video codec from the camera.
// We only expect one audio and one video codec.
// If there are multiple audio or video streams, we will use the first one.
audioCodec := ""
videoCodec := ""
audioStreams, _ := rtspClient.GetAudioStreams()
videoStreams, _ := rtspClient.GetVideoStreams()
if len(audioStreams) > 0 {
audioCodec = audioStreams[0].Name
config.Capture.IPCamera.SampleRate = audioStreams[0].SampleRate
config.Capture.IPCamera.Channels = audioStreams[0].Channels
}
if len(videoStreams) > 0 {
videoCodec = videoStreams[0].Name
}
// Check if continuous recording.
if config.Capture.Continuous == "true" {
//var cws *cacheWriterSeeker
var myMuxer *mp4.Movmuxer
var mp4Video *video.MP4
var videoTrack uint32
var audioTrack uint32
var name string
@@ -95,7 +110,7 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
start := false
// If continuous record the full length
recordingPeriod = maxRecordingPeriod
postRecording = maxRecordingPeriod
// Recording file name
fullName := ""
@@ -114,20 +129,21 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
nextPkt, cursorError = recordingCursor.ReadPacket()
now := time.Now().Unix()
now := time.Now().UnixMilli()
if start && // If already recording and current frame is a keyframe and we should stop recording
nextPkt.IsKeyFrame && (timestamp+recordingPeriod-now <= 0 || now-startRecording >= maxRecordingPeriod) {
nextPkt.IsKeyFrame && (timestamp+postRecording-now <= 0 || now-startRecording >= maxRecordingPeriod-1000) {
// Write the last packet
ttime := convertPTS(pkt.TimeLegacy)
pts := convertPTS(pkt.TimeLegacy)
if pkt.IsVideo {
if err := myMuxer.Write(videoTrack, pkt.Data, ttime, ttime); err != nil {
// Write the last packet
if err := mp4Video.AddSampleToTrack(videoTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
}
} else if pkt.IsAudio {
// Write the last packet
if pkt.Codec == "AAC" {
if err := myMuxer.Write(audioTrack, pkt.Data, ttime, ttime); err != nil {
if err := mp4Video.AddSampleToTrack(audioTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
}
} else if pkt.Codec == "PCM_MULAW" {
@@ -136,21 +152,44 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
}
}
// This will write the trailer a well.
if err := myMuxer.WriteTrailer(); err != nil {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
}
// Close mp4
mp4Video.Close(&config)
log.Log.Info("capture.main.HandleRecordStream(continuous): recording finished: file save: " + name)
// Cleanup muxer
start = false
file.Close()
file = nil
// Check if need to convert to fragmented using bento
if config.Capture.Fragmented == "true" && config.Capture.FragmentedDuration > 0 {
utils.CreateFragmentedMP4(fullName, config.Capture.FragmentedDuration)
// Update the name of the recording with the duration.
// We will update the name of the recording with the duration in milliseconds.
if mp4Video.VideoTotalDuration > 0 {
duration := mp4Video.VideoTotalDuration
// Update the name with the duration in milliseconds.
startRecordingSeconds := startRecording / 1000 // convert to seconds
startRecordingMilliseconds := startRecording % 1000 // convert to milliseconds
s := strconv.FormatInt(startRecordingSeconds, 10) + "_" +
strconv.Itoa(len(strconv.FormatInt(startRecordingMilliseconds, 10))) + "-" +
strconv.FormatInt(startRecordingMilliseconds, 10) + "_" +
config.Name + "_" +
"0-0-0-0" + "_" + // region coordinates, we
"-1" + "_" + // token
strconv.FormatInt(int64(duration), 10) // + "_" + // duration of recording
//utils.VERSION // version of the agent
oldName := name
name = s + ".mp4"
fullName = configDirectory + "/data/recordings/" + name
log.Log.Info("capture.main.HandleRecordStream(motiondetection): renamed file from: " + oldName + " to: " + name)
// Rename the file to the new name.
err := os.Rename(
configDirectory+"/data/recordings/"+oldName,
configDirectory+"/data/recordings/"+s+".mp4")
if err != nil {
log.Log.Error("capture.main.HandleRecordStream(motiondetection): error renaming file: " + err.Error())
}
} else {
log.Log.Info("capture.main.HandleRecordStream(continuous): no video data recorded, not renaming file.")
}
// Check if we need to encrypt the recording.
@@ -208,13 +247,17 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
// - Number of changes
// - Token
startRecording = time.Now().Unix() // we mark the current time when the record started.ss
s := strconv.FormatInt(startRecording, 10) + "_" +
"6" + "-" +
"967003" + "_" +
config.Name + "_" +
"200-200-400-400" + "_0_" +
"769"
startRecording = time.Now().UnixMilli()
startRecordingSeconds := startRecording / 1000 // convert to seconds
startRecordingMilliseconds := startRecording % 1000 // convert to milliseconds
s := strconv.FormatInt(startRecordingSeconds, 10) + "_" + // start timestamp in seconds
strconv.Itoa(len(strconv.FormatInt(startRecordingMilliseconds, 10))) + "-" + // length of milliseconds
strconv.FormatInt(startRecordingMilliseconds, 10) + "_" + // milliseconds
config.Name + "_" + // device name
"0-0-0-0" + "_" + // region coordinates, we will not use this for continuous recording
"0" + "_" + // token
"0" + "_" //+ // duration of recording in milliseconds
//utils.VERSION // version of the agent
name = s + ".mp4"
fullName = configDirectory + "/data/recordings/" + name
@@ -222,53 +265,61 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
// Running...
log.Log.Info("capture.main.HandleRecordStream(continuous): recording started")
file, err = os.Create(fullName)
if err == nil {
//cws = newCacheWriterSeeker(4096)
myMuxer, _ = mp4.CreateMp4Muxer(file)
// We choose between H264 and H265
width := configuration.Config.Capture.IPCamera.Width
height := configuration.Config.Capture.IPCamera.Height
widthOption := mp4.WithVideoWidth(uint32(width))
heightOption := mp4.WithVideoHeight(uint32(height))
if pkt.Codec == "H264" {
videoTrack = myMuxer.AddVideoTrack(mp4.MP4_CODEC_H264, widthOption, heightOption)
} else if pkt.Codec == "H265" {
videoTrack = myMuxer.AddVideoTrack(mp4.MP4_CODEC_H265, widthOption, heightOption)
}
// For an MP4 container, AAC is the only audio codec supported.
audioTrack = myMuxer.AddAudioTrack(mp4.MP4_CODEC_AAC)
} else {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
// Get width and height from the camera.
width := configuration.Config.Capture.IPCamera.Width
height := configuration.Config.Capture.IPCamera.Height
// Get SPS and PPS NALUs from the camera.
spsNALUS := configuration.Config.Capture.IPCamera.SPSNALUs
ppsNALUS := configuration.Config.Capture.IPCamera.PPSNALUs
vpsNALUS := configuration.Config.Capture.IPCamera.VPSNALUs
// Create a video file, and set the dimensions.
mp4Video = video.NewMP4(fullName, spsNALUS, ppsNALUS, vpsNALUS)
mp4Video.SetWidth(width)
mp4Video.SetHeight(height)
if videoCodec == "H264" {
videoTrack = mp4Video.AddVideoTrack("H264")
} else if videoCodec == "H265" {
videoTrack = mp4Video.AddVideoTrack("H265")
}
if audioCodec == "AAC" {
audioTrack = mp4Video.AddAudioTrack("AAC")
} else if audioCodec == "PCM_MULAW" {
log.Log.Debug("capture.main.HandleRecordStream(continuous): no AAC audio codec detected, skipping audio track.")
}
ttime := convertPTS(pkt.TimeLegacy)
pts := convertPTS(pkt.TimeLegacy)
if pkt.IsVideo {
if err := myMuxer.Write(videoTrack, pkt.Data, ttime, ttime); err != nil {
if err := mp4Video.AddSampleToTrack(videoTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
}
} else if pkt.IsAudio {
if pkt.Codec == "AAC" {
if err := myMuxer.Write(audioTrack, pkt.Data, ttime, ttime); err != nil {
if err := mp4Video.AddSampleToTrack(audioTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
}
} else if pkt.Codec == "PCM_MULAW" {
// TODO: transcode to AAC, some work to do..
// We might need to use ffmpeg to transcode the audio to AAC.
// For now we will skip the audio track.
log.Log.Debug("capture.main.HandleRecordStream(continuous): no AAC audio codec detected, skipping audio track.")
}
}
recordingStatus = "started"
} else if start {
ttime := convertPTS(pkt.TimeLegacy)
pts := convertPTS(pkt.TimeLegacy)
if pkt.IsVideo {
if err := myMuxer.Write(videoTrack, pkt.Data, ttime, ttime); err != nil {
// New method using new mp4 library
if err := mp4Video.AddSampleToTrack(videoTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
}
} else if pkt.IsAudio {
if pkt.Codec == "AAC" {
if err := myMuxer.Write(audioTrack, pkt.Data, ttime, ttime); err != nil {
if err := mp4Video.AddSampleToTrack(audioTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(continuous): " + err.Error())
}
} else if pkt.Codec == "PCM_MULAW" {
@@ -277,7 +328,6 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
}
}
}
pkt = nextPkt
}
@@ -285,21 +335,43 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
// If this happens we need to check to properly close the recording.
if cursorError != nil {
if recordingStatus == "started" {
// This will write the trailer a well.
if err := myMuxer.WriteTrailer(); err != nil {
log.Log.Error(err.Error())
}
log.Log.Info("capture.main.HandleRecordStream(continuous): Recording finished: file save: " + name)
// Cleanup muxer
start = false
file.Close()
file = nil
// Check if need to convert to fragmented using bento
if config.Capture.Fragmented == "true" && config.Capture.FragmentedDuration > 0 {
utils.CreateFragmentedMP4(fullName, config.Capture.FragmentedDuration)
// Update the name of the recording with the duration.
// We will update the name of the recording with the duration in milliseconds.
if mp4Video.VideoTotalDuration > 0 {
duration := mp4Video.VideoTotalDuration
// Update the name with the duration in milliseconds.
startRecordingSeconds := startRecording / 1000 // convert to seconds
startRecordingMilliseconds := startRecording % 1000 // convert to milliseconds
s := strconv.FormatInt(startRecordingSeconds, 10) + "_" +
strconv.Itoa(len(strconv.FormatInt(startRecordingMilliseconds, 10))) + "-" +
strconv.FormatInt(startRecordingMilliseconds, 10) + "_" +
config.Name + "_" +
"0-0-0-0" + "_" + // region coordinates, we
"-1" + "_" + // token
strconv.FormatInt(int64(duration), 10) // + "_" + // duration of recording
//utils.VERSION // version of the agent
oldName := name
name = s + ".mp4"
fullName = configDirectory + "/data/recordings/" + name
log.Log.Info("capture.main.HandleRecordStream(motiondetection): renamed file from: " + oldName + " to: " + name)
// Rename the file to the new name.
err := os.Rename(
configDirectory+"/data/recordings/"+oldName,
configDirectory+"/data/recordings/"+s+".mp4")
if err != nil {
log.Log.Error("capture.main.HandleRecordStream(motiondetection): error renaming file: " + err.Error())
}
} else {
log.Log.Info("capture.main.HandleRecordStream(continuous): no video data recorded, not renaming file.")
}
// Check if we need to encrypt the recording.
@@ -337,32 +409,60 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
log.Log.Info("capture.main.HandleRecordStream(motiondetection): Start motion based recording ")
var lastDuration int64
var lastRecordingTime int64
var lastDuration int64 = 0 // last duration in milliseconds
var lastRecordingTime int64 = 0 // last recording time in milliseconds
var displayTime int64 = 0 // display time in milliseconds
//var cws *cacheWriterSeeker
var myMuxer *mp4.Movmuxer
var videoTrack uint32
var audioTrack uint32
streams, _ := rtspClient.GetVideoStreams()
videoStream := streams[0] // We will use the first video stream, as we only expect one video stream.
for motion := range communication.HandleMotion {
timestamp = time.Now().Unix()
startRecording = time.Now().Unix() // we mark the current time when the record started.
numberOfChanges := motion.NumberOfChanges
// Get as much packets we need.
var cursorError error
var pkt packets.Packet
var nextPkt packets.Packet
recordingCursor := queue.Oldest() // Start from the latest packet in the queue)
timestamp = time.Now().UnixMilli()
startRecording = time.Now().UnixMilli() // we mark the current time when the record started.
// If we have pre-recording we will subtract that number of seconds,
// taking into account FPS and GOP size (keyframe interval).
if config.Capture.PreRecording > 0 {
var preRecordingDelta int64 = 0
if preRecording > 0 {
fps := videoStream.FPS
queueSize := queue.GetSize()
// Based on the GOP size and FPS we can calculate the pre-recording time.
// It might be that the queue size is 0, in that case we will not calculate the pre-recording time.
queuedAvailablePreRecording := preRecording
if queueSize > 0 && fps > 0 {
queuedAvailablePreRecording = int64(queueSize) / int64(fps) * 1000 // convert to milliseconds
}
timeBetweenNowAndLastRecording := startRecording - lastRecordingTime
if lastRecordingTime == 0 {
timeBetweenNowAndLastRecording = 0
}
// Might be that recordings are coming short after each other.
// Therefore we do some math with the current time and the last recording time.
timeBetweenNowAndLastRecording := startRecording - lastRecordingTime
if timeBetweenNowAndLastRecording > int64(config.Capture.PreRecording) {
startRecording = startRecording - int64(config.Capture.PreRecording) + 1
} else {
startRecording = startRecording - timeBetweenNowAndLastRecording
if timeBetweenNowAndLastRecording >= preRecording {
displayTime = startRecording - preRecording + 1000
preRecordingDelta = preRecording
} else if timeBetweenNowAndLastRecording < preRecording {
// If the time between now and the last recording is less than the pre-recording time,
// we will use the pre-recording time.
if lastRecordingTime == 0 && queuedAvailablePreRecording < preRecording {
displayTime = startRecording - queuedAvailablePreRecording
} else {
preRecordingDelta = timeBetweenNowAndLastRecording
displayTime = startRecording - preRecordingDelta
}
}
}
@@ -375,44 +475,59 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
// - Number of changes
// - Token
s := strconv.FormatInt(startRecording, 10) + "_" +
"6" + "-" +
"967003" + "_" +
config.Name + "_" +
"200-200-400-400" + "_" +
strconv.Itoa(numberOfChanges) + "_" +
"769"
displayTimeSeconds := displayTime / 1000 // convert to seconds
displayTimeMilliseconds := displayTime % 1000 // convert to milliseconds
motionRectangleString := "0-0-0-0"
if motion.Rectangle.X != 0 || motion.Rectangle.Y != 0 ||
motion.Rectangle.Width != 0 || motion.Rectangle.Height != 0 {
motionRectangleString = strconv.Itoa(motion.Rectangle.X) + "-" + strconv.Itoa(motion.Rectangle.Y) + "-" +
strconv.Itoa(motion.Rectangle.Width) + "-" + strconv.Itoa(motion.Rectangle.Height)
}
// Get the number of changes from the motion detection.
numberOfChanges := motion.NumberOfChanges
s := strconv.FormatInt(displayTimeSeconds, 10) + "_" + // start timestamp in seconds
strconv.Itoa(len(strconv.FormatInt(displayTimeMilliseconds, 10))) + "-" + // length of milliseconds
strconv.FormatInt(displayTimeMilliseconds, 10) + "_" + // milliseconds
config.Name + "_" + // device name
motionRectangleString + "_" + // region coordinates, we will not use this for continuous recording
strconv.Itoa(numberOfChanges) + "_" + // number of changes
"0" // + "_" + // duration of recording in milliseconds
//utils.VERSION // version of the agent
name := s + ".mp4"
fullName := configDirectory + "/data/recordings/" + name
// Running...
log.Log.Info("capture.main.HandleRecordStream(motiondetection): recording started")
file, _ = os.Create(fullName)
myMuxer, _ = mp4.CreateMp4Muxer(file)
log.Log.Info("capture.main.HandleRecordStream(motiondetection): recording started (" + name + ")" + " at " + strconv.FormatInt(displayTimeSeconds, 10) + " unix")
// Check which video codec we need to use.
videoSteams, _ := rtspClient.GetVideoStreams()
for _, stream := range videoSteams {
width := configuration.Config.Capture.IPCamera.Width
height := configuration.Config.Capture.IPCamera.Height
widthOption := mp4.WithVideoWidth(uint32(width))
heightOption := mp4.WithVideoHeight(uint32(height))
if stream.Name == "H264" {
videoTrack = myMuxer.AddVideoTrack(mp4.MP4_CODEC_H264, widthOption, heightOption)
} else if stream.Name == "H265" {
videoTrack = myMuxer.AddVideoTrack(mp4.MP4_CODEC_H265, widthOption, heightOption)
}
// Get width and height from the camera.
width := configuration.Config.Capture.IPCamera.Width
height := configuration.Config.Capture.IPCamera.Height
// Get SPS and PPS NALUs from the camera.
spsNALUS := configuration.Config.Capture.IPCamera.SPSNALUs
ppsNALUS := configuration.Config.Capture.IPCamera.PPSNALUs
vpsNALUS := configuration.Config.Capture.IPCamera.VPSNALUs
// Create a video file, and set the dimensions.
mp4Video := video.NewMP4(fullName, spsNALUS, ppsNALUS, vpsNALUS)
mp4Video.SetWidth(width)
mp4Video.SetHeight(height)
if videoCodec == "H264" {
videoTrack = mp4Video.AddVideoTrack("H264")
} else if videoCodec == "H265" {
videoTrack = mp4Video.AddVideoTrack("H265")
}
if audioCodec == "AAC" {
audioTrack = mp4Video.AddAudioTrack("AAC")
} else if audioCodec == "PCM_MULAW" {
log.Log.Debug("capture.main.HandleRecordStream(continuous): no AAC audio codec detected, skipping audio track.")
}
// For an MP4 container, AAC is the only audio codec supported.
audioTrack = myMuxer.AddAudioTrack(mp4.MP4_CODEC_AAC)
start := false
// Get as much packets we need.
var cursorError error
var pkt packets.Packet
var nextPkt packets.Packet
recordingCursor := queue.DelayedGopCount(int(config.Capture.PreRecording + 1))
start := false
if cursorError == nil {
pkt, cursorError = recordingCursor.ReadPacket()
@@ -425,68 +540,93 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
log.Log.Error("capture.main.HandleRecordStream(motiondetection): " + cursorError.Error())
}
now := time.Now().Unix()
now := time.Now().UnixMilli()
select {
case motion := <-communication.HandleMotion:
timestamp = now
log.Log.Info("capture.main.HandleRecordStream(motiondetection): motion detected while recording. Expanding recording.")
numberOfChanges = motion.NumberOfChanges
numberOfChanges := motion.NumberOfChanges
log.Log.Info("capture.main.HandleRecordStream(motiondetection): Received message with recording data, detected changes to save: " + strconv.Itoa(numberOfChanges))
default:
}
if (timestamp+recordingPeriod-now < 0 || now-startRecording > maxRecordingPeriod) && nextPkt.IsKeyFrame {
log.Log.Info("capture.main.HandleRecordStream(motiondetection): closing recording (timestamp: " + strconv.FormatInt(timestamp, 10) + ", recordingPeriod: " + strconv.FormatInt(recordingPeriod, 10) + ", now: " + strconv.FormatInt(now, 10) + ", startRecording: " + strconv.FormatInt(startRecording, 10) + ", maxRecordingPeriod: " + strconv.FormatInt(maxRecordingPeriod, 10))
if (timestamp+postRecording+(preRecording-preRecordingDelta)-now < 0 || now-startRecording > maxRecordingPeriod-preRecordingDelta) && nextPkt.IsKeyFrame {
log.Log.Info("capture.main.HandleRecordStream(motiondetection): timestamp+postRecording-now < 0 - " + strconv.FormatInt(timestamp+postRecording-now, 10) + " < 0")
log.Log.Info("capture.main.HandleRecordStream(motiondetection): now-startRecording > maxRecordingPeriod-1000 - " + strconv.FormatInt(now-startRecording, 10) + " > " + strconv.FormatInt(maxRecordingPeriod-1000, 10))
log.Log.Info("capture.main.HandleRecordStream(motiondetection): closing recording (timestamp: " + strconv.FormatInt(timestamp, 10) + ", postRecording: " + strconv.FormatInt(postRecording, 10) + ", now: " + strconv.FormatInt(now, 10) + ", startRecording: " + strconv.FormatInt(startRecording, 10) + ", maxRecordingPeriod: " + strconv.FormatInt(maxRecordingPeriod, 10))
break
}
if pkt.IsKeyFrame && !start && pkt.Time >= lastDuration {
if pkt.IsKeyFrame && !start && (pkt.Time >= lastDuration || pkt.Time == 0) {
// We start the recording if we have a keyframe and the last duration is 0 or less than the current packet time.
// It could be start we start from the beginning of the recording.
log.Log.Debug("capture.main.HandleRecordStream(motiondetection): write frames")
start = true
}
if start {
ttime := convertPTS(pkt.TimeLegacy)
pts := convertPTS(pkt.TimeLegacy)
if pkt.IsVideo {
if err := myMuxer.Write(videoTrack, pkt.Data, ttime, ttime); err != nil {
log.Log.Debug("capture.main.HandleRecordStream(motiondetection): add video sample")
if err := mp4Video.AddSampleToTrack(videoTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(motiondetection): " + err.Error())
}
} else if pkt.IsAudio {
log.Log.Debug("capture.main.HandleRecordStream(motiondetection): add audio sample")
if pkt.Codec == "AAC" {
if err := myMuxer.Write(audioTrack, pkt.Data, ttime, ttime); err != nil {
if err := mp4Video.AddSampleToTrack(audioTrack, pkt.IsKeyFrame, pkt.Data, pts); err != nil {
log.Log.Error("capture.main.HandleRecordStream(motiondetection): " + err.Error())
}
} else if pkt.Codec == "PCM_MULAW" {
// TODO: transcode to AAC, some work to do..
// We might need to use ffmpeg to transcode the audio to AAC.
// For now we will skip the audio track.
log.Log.Debug("capture.main.HandleRecordStream(motiondetection): no AAC audio codec detected, skipping audio track.")
}
}
// We will sync to file every keyframe.
if pkt.IsKeyFrame {
err := file.Sync()
if err != nil {
log.Log.Error("capture.main.HandleRecordStream(motiondetection): " + err.Error())
} else {
log.Log.Debug("capture.main.HandleRecordStream(motiondetection): synced file " + name)
}
}
}
pkt = nextPkt
}
// This will write the trailer a well.
myMuxer.WriteTrailer()
// Update the last duration and last recording time.
// This is used to determine if we need to start a new recording.
lastDuration = pkt.Time
lastRecordingTime = time.Now().UnixMilli()
// This will close the recording and write the last packet.
mp4Video.Close(&config)
log.Log.Info("capture.main.HandleRecordStream(motiondetection): file save: " + name)
lastDuration = pkt.Time
lastRecordingTime = time.Now().Unix()
file.Close()
file = nil
// Update the name of the recording with the duration.
// We will update the name of the recording with the duration in milliseconds.
if mp4Video.VideoTotalDuration > 0 {
duration := mp4Video.VideoTotalDuration
// Check if need to convert to fragmented using bento
if config.Capture.Fragmented == "true" && config.Capture.FragmentedDuration > 0 {
utils.CreateFragmentedMP4(fullName, config.Capture.FragmentedDuration)
// Update the name with the duration in milliseconds.
s := strconv.FormatInt(displayTimeSeconds, 10) + "_" +
strconv.Itoa(len(strconv.FormatInt(displayTimeMilliseconds, 10))) + "-" +
strconv.FormatInt(displayTimeMilliseconds, 10) + "_" +
config.Name + "_" +
motionRectangleString + "_" +
strconv.Itoa(numberOfChanges) + "_" + // number of changes
strconv.FormatInt(int64(duration), 10) // + "_" + // duration of recording in milliseconds
//utils.VERSION // version of the agent
oldName := name
name = s + ".mp4"
fullName = configDirectory + "/data/recordings/" + name
log.Log.Info("capture.main.HandleRecordStream(motiondetection): renamed file from: " + oldName + " to: " + name)
// Rename the file to the new name.
err := os.Rename(
configDirectory+"/data/recordings/"+oldName,
configDirectory+"/data/recordings/"+s+".mp4")
if err != nil {
log.Log.Error("capture.main.HandleRecordStream(motiondetection): error renaming file: " + err.Error())
}
} else {
log.Log.Info("capture.main.HandleRecordStream(motiondetection): no video data recorded, not renaming file.")
}
// Check if we need to encrypt the recording.
@@ -534,6 +674,10 @@ func HandleRecordStream(queue *packets.Queue, configDirectory string, configurat
// @Success 200 {object} models.APIResponse
func VerifyCamera(c *gin.Context) {
// Start OpenTelemetry tracing
ctxVerifyCamera, span := tracer.Start(context.Background(), "VerifyCamera", trace.WithSpanKind(trace.SpanKindServer))
defer span.End()
var cameraStreams models.CameraStreams
err := c.BindJSON(&cameraStreams)
@@ -559,12 +703,11 @@ func VerifyCamera(c *gin.Context) {
Url: rtspUrl,
}
err := rtspClient.Connect(ctx)
err := rtspClient.Connect(ctx, ctxVerifyCamera)
if err == nil {
// Get the streams from the rtsp client.
streams, _ := rtspClient.GetStreams()
videoIdx := -1
audioIdx := -1
for i, stream := range streams {
@@ -575,7 +718,7 @@ func VerifyCamera(c *gin.Context) {
}
}
err := rtspClient.Close()
err := rtspClient.Close(ctxVerifyCamera)
if err == nil {
if videoIdx > -1 {
c.JSON(200, models.APIResponse{
@@ -695,6 +838,6 @@ func convertPTS(v time.Duration) uint64 {
return uint64(v.Milliseconds())
}
func convertPTS2(v int64) uint64 {
/*func convertPTS2(v int64) uint64 {
return uint64(v) / 100
}
}*/

View File

@@ -131,7 +131,7 @@ func HandleUpload(configDirectory string, configuration *models.Configuration, c
log.Log.Error("HandleUpload: " + err.Error())
}
} else {
delay = 20 * time.Second // slow down
delay = 5 * time.Second // slow down
if err != nil {
log.Log.Error("HandleUpload: " + err.Error())
}
@@ -1162,3 +1162,184 @@ func VerifyPersistence(c *gin.Context, configDirectory string) {
})
}
}
// VerifySecondaryPersistence godoc
// @Router /api/persistence/secondary/verify [post]
// @ID verify-persistence
// @Security Bearer
// @securityDefinitions.apikey Bearer
// @in header
// @name Authorization
// @Tags persistence
// @Param config body models.Config true "Config"
// @Summary Will verify the secondary persistence.
// @Description Will verify the secondary persistence.
// @Success 200 {object} models.APIResponse
func VerifySecondaryPersistence(c *gin.Context, configDirectory string) {
var config models.Config
err := c.BindJSON(&config)
if err != nil || config.Cloud != "" {
if config.Cloud == "kstorage" || config.Cloud == "kerberosvault" {
if config.KStorageSecondary == nil {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): please fill-in the required Kerberos Vault credentials."
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
} else {
uri := config.KStorageSecondary.URI
accessKey := config.KStorageSecondary.AccessKey
secretAccessKey := config.KStorageSecondary.SecretAccessKey
directory := config.KStorageSecondary.Directory
provider := config.KStorageSecondary.Provider
if err == nil && uri != "" && accessKey != "" && secretAccessKey != "" {
var client *http.Client
if os.Getenv("AGENT_TLS_INSECURE") == "true" {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
client = &http.Client{Transport: tr}
} else {
client = &http.Client{}
}
req, err := http.NewRequest("POST", uri+"/ping", nil)
if err == nil {
req.Header.Add("X-Kerberos-Storage-AccessKey", accessKey)
req.Header.Add("X-Kerberos-Storage-SecretAccessKey", secretAccessKey)
resp, err := client.Do(req)
if err == nil {
body, err := io.ReadAll(resp.Body)
defer resp.Body.Close()
if err == nil && resp.StatusCode == http.StatusOK {
if provider != "" || directory != "" {
// Generate a random name.
timestamp := time.Now().Unix()
fileName := strconv.FormatInt(timestamp, 10) +
"_6-967003_" + config.Name + "_200-200-400-400_24_769.mp4"
// Open test-480p.mp4
file, err := os.Open(configDirectory + "/data/test-480p.mp4")
if err != nil {
msg := "cloud.VerifyPersistence(kerberosvault): error reading test-480p.mp4: " + err.Error()
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
defer file.Close()
req, err := http.NewRequest("POST", uri+"/storage", file)
if err == nil {
req.Header.Set("Content-Type", "video/mp4")
req.Header.Set("X-Kerberos-Storage-CloudKey", config.HubKey)
req.Header.Set("X-Kerberos-Storage-AccessKey", accessKey)
req.Header.Set("X-Kerberos-Storage-SecretAccessKey", secretAccessKey)
req.Header.Set("X-Kerberos-Storage-Provider", provider)
req.Header.Set("X-Kerberos-Storage-FileName", fileName)
req.Header.Set("X-Kerberos-Storage-Device", config.Key)
req.Header.Set("X-Kerberos-Storage-Capture", "IPCamera")
req.Header.Set("X-Kerberos-Storage-Directory", directory)
var client *http.Client
if os.Getenv("AGENT_TLS_INSECURE") == "true" {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
client = &http.Client{Transport: tr}
} else {
client = &http.Client{}
}
resp, err := client.Do(req)
if err == nil {
if resp != nil {
body, err := io.ReadAll(resp.Body)
defer resp.Body.Close()
if err == nil {
if resp.StatusCode == 200 {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Upload allowed using the credentials provided (" + accessKey + ", " + secretAccessKey + ")"
log.Log.Info(msg)
c.JSON(200, models.APIResponse{
Data: body,
})
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Something went wrong while verifying your persistence settings. Make sure your provider is the same as the storage provider in your Kerberos Vault, and the relevant storage provider is configured properly."
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
}
}
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Upload of fake recording failed: " + err.Error()
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Something went wrong while creating /storage POST request." + err.Error()
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Provider and/or directory is missing from the request."
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Something went wrong while verifying storage credentials: " + string(body)
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Something went wrong while verifying storage credentials:" + err.Error()
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): Something went wrong while verifying storage credentials:" + err.Error()
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
} else {
msg := "cloud.VerifySecondaryPersistence(kerberosvault): please fill-in the required Kerberos Vault credentials."
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
}
}
} else {
msg := "cloud.VerifySecondaryPersistence(): No persistence was specified, so do not know what to verify:" + err.Error()
log.Log.Error(msg)
c.JSON(400, models.APIResponse{
Data: msg,
})
}
}

View File

@@ -3,14 +3,20 @@ package cloud
import (
"crypto/tls"
"errors"
"io/ioutil"
"io"
"net/http"
"os"
"time"
"github.com/kerberos-io/agent/machinery/src/log"
"github.com/kerberos-io/agent/machinery/src/models"
)
// We will count the number of retries we have done.
// If we have done more than "kstorageRetryPolicy" retries, we will stop, and start sending to the secondary storage.
var kstorageRetryCount = 0
var kstorageRetryTimeout = time.Now().Unix()
func UploadKerberosVault(configuration *models.Configuration, fileName string) (bool, bool, error) {
config := configuration.Config
@@ -19,7 +25,7 @@ func UploadKerberosVault(configuration *models.Configuration, fileName string) (
config.KStorage.SecretAccessKey == "" ||
config.KStorage.Directory == "" ||
config.KStorage.URI == "" {
err := "UploadKerberosVault: Kerberos Vault not properly configured."
err := "UploadKerberosVault: Kerberos Vault not properly configured"
log.Log.Info(err)
return false, false, errors.New(err)
}
@@ -42,64 +48,147 @@ func UploadKerberosVault(configuration *models.Configuration, fileName string) (
defer file.Close()
}
if err != nil {
err := "UploadKerberosVault: Upload Failed, file doesn't exists anymore."
err := "UploadKerberosVault: Upload Failed, file doesn't exists anymore"
log.Log.Info(err)
return false, false, errors.New(err)
}
publicKey := config.KStorage.CloudKey
// This is the new way ;)
if config.HubKey != "" {
publicKey = config.HubKey
}
req, err := http.NewRequest("POST", config.KStorage.URI+"/storage", file)
if err != nil {
errorMessage := "UploadKerberosVault: error reading request, " + config.KStorage.URI + "/storage: " + err.Error()
log.Log.Error(errorMessage)
return false, true, errors.New(errorMessage)
}
req.Header.Set("Content-Type", "video/mp4")
req.Header.Set("X-Kerberos-Storage-CloudKey", publicKey)
req.Header.Set("X-Kerberos-Storage-AccessKey", config.KStorage.AccessKey)
req.Header.Set("X-Kerberos-Storage-SecretAccessKey", config.KStorage.SecretAccessKey)
req.Header.Set("X-Kerberos-Storage-Provider", config.KStorage.Provider)
req.Header.Set("X-Kerberos-Storage-FileName", fileName)
req.Header.Set("X-Kerberos-Storage-Device", config.Key)
req.Header.Set("X-Kerberos-Storage-Capture", "IPCamera")
req.Header.Set("X-Kerberos-Storage-Directory", config.KStorage.Directory)
// We need to check if we are in a retry timeout.
if kstorageRetryTimeout <= time.Now().Unix() {
var client *http.Client
if os.Getenv("AGENT_TLS_INSECURE") == "true" {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
req, err := http.NewRequest("POST", config.KStorage.URI+"/storage", file)
if err != nil {
errorMessage := "UploadKerberosVault: error reading request, " + config.KStorage.URI + "/storage: " + err.Error()
log.Log.Error(errorMessage)
return false, true, errors.New(errorMessage)
}
client = &http.Client{Transport: tr}
} else {
client = &http.Client{}
}
req.Header.Set("Content-Type", "video/mp4")
req.Header.Set("X-Kerberos-Storage-CloudKey", publicKey)
req.Header.Set("X-Kerberos-Storage-AccessKey", config.KStorage.AccessKey)
req.Header.Set("X-Kerberos-Storage-SecretAccessKey", config.KStorage.SecretAccessKey)
req.Header.Set("X-Kerberos-Storage-Provider", config.KStorage.Provider)
req.Header.Set("X-Kerberos-Storage-FileName", fileName)
req.Header.Set("X-Kerberos-Storage-Device", config.Key)
req.Header.Set("X-Kerberos-Storage-Capture", "IPCamera")
req.Header.Set("X-Kerberos-Storage-Directory", config.KStorage.Directory)
resp, err := client.Do(req)
if resp != nil {
defer resp.Body.Close()
}
var client *http.Client
if os.Getenv("AGENT_TLS_INSECURE") == "true" {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
client = &http.Client{Transport: tr}
} else {
client = &http.Client{}
}
if err == nil {
resp, err := client.Do(req)
if resp != nil {
body, err := ioutil.ReadAll(resp.Body)
if err == nil {
if resp.StatusCode == 200 {
log.Log.Info("UploadKerberosVault: Upload Finished, " + resp.Status + ", " + string(body))
return true, true, nil
} else {
log.Log.Info("UploadKerberosVault: Upload Failed, " + resp.Status + ", " + string(body))
return false, true, nil
defer resp.Body.Close()
}
if err == nil {
if resp != nil {
body, err := io.ReadAll(resp.Body)
if err == nil {
if resp.StatusCode == 200 {
kstorageRetryCount = 0
log.Log.Info("UploadKerberosVault: Upload Finished, " + resp.Status + ", " + string(body))
return true, true, nil
} else {
// We increase the retry count, and set the timeout.
// If we have reached the retry policy, we set the timeout.
// This means we will not retry for the next 5 minutes.
if kstorageRetryCount < config.KStorage.MaxRetries {
kstorageRetryCount = (kstorageRetryCount + 1)
}
if kstorageRetryCount == config.KStorage.MaxRetries {
kstorageRetryTimeout = time.Now().Add(time.Duration(config.KStorage.Timeout) * time.Second).Unix()
}
log.Log.Info("UploadKerberosVault: Upload Failed, " + resp.Status + ", " + string(body))
}
}
}
} else {
log.Log.Info("UploadKerberosVault: Upload Failed, " + err.Error())
}
}
// We might need to check if we can upload to our secondary storage.
if config.KStorageSecondary.AccessKey == "" ||
config.KStorageSecondary.SecretAccessKey == "" ||
config.KStorageSecondary.Directory == "" ||
config.KStorageSecondary.URI == "" {
log.Log.Info("UploadKerberosVault (Secondary): Secondary Kerberos Vault not properly configured.")
} else {
if kstorageRetryCount < config.KStorage.MaxRetries {
log.Log.Info("UploadKerberosVault (Secondary): Do not upload to secondary storage, we are still in retry policy.")
return false, true, nil
}
log.Log.Info("UploadKerberosVault (Secondary): Uploading to Secondary Kerberos Vault (" + config.KStorageSecondary.URI + ")")
file, err = os.OpenFile(fullname, os.O_RDWR, 0755)
if file != nil {
defer file.Close()
}
if err != nil {
err := "UploadKerberosVault (Secondary): Upload Failed, file doesn't exists anymore"
log.Log.Info(err)
return false, false, errors.New(err)
}
req, err := http.NewRequest("POST", config.KStorageSecondary.URI+"/storage", file)
if err != nil {
errorMessage := "UploadKerberosVault (Secondary): error reading request, " + config.KStorageSecondary.URI + "/storage: " + err.Error()
log.Log.Error(errorMessage)
return false, true, errors.New(errorMessage)
}
req.Header.Set("Content-Type", "video/mp4")
req.Header.Set("X-Kerberos-Storage-CloudKey", publicKey)
req.Header.Set("X-Kerberos-Storage-AccessKey", config.KStorageSecondary.AccessKey)
req.Header.Set("X-Kerberos-Storage-SecretAccessKey", config.KStorageSecondary.SecretAccessKey)
req.Header.Set("X-Kerberos-Storage-Provider", config.KStorageSecondary.Provider)
req.Header.Set("X-Kerberos-Storage-FileName", fileName)
req.Header.Set("X-Kerberos-Storage-Device", config.Key)
req.Header.Set("X-Kerberos-Storage-Capture", "IPCamera")
req.Header.Set("X-Kerberos-Storage-Directory", config.KStorageSecondary.Directory)
var client *http.Client
if os.Getenv("AGENT_TLS_INSECURE") == "true" {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
client = &http.Client{Transport: tr}
} else {
client = &http.Client{}
}
resp, err := client.Do(req)
if resp != nil {
defer resp.Body.Close()
}
if err == nil {
if resp != nil {
body, err := io.ReadAll(resp.Body)
if err == nil {
if resp.StatusCode == 200 {
log.Log.Info("UploadKerberosVault (Secondary): Upload Finished to secondary, " + resp.Status + ", " + string(body))
return true, true, nil
} else {
log.Log.Info("UploadKerberosVault (Secondary): Upload Failed to secondary, " + resp.Status + ", " + string(body))
}
}
}
}
}
errorMessage := "UploadKerberosVault: Upload Failed, " + err.Error()
log.Log.Info(errorMessage)
return false, true, errors.New(errorMessage)
return false, true, nil
}

View File

@@ -9,6 +9,7 @@ import (
mqtt "github.com/eclipse/paho.mqtt.golang"
"github.com/gin-gonic/gin"
"go.opentelemetry.io/otel"
"github.com/kerberos-io/agent/machinery/src/capture"
"github.com/kerberos-io/agent/machinery/src/cloud"
@@ -23,9 +24,15 @@ import (
"github.com/tevino/abool"
)
func Bootstrap(configDirectory string, configuration *models.Configuration, communication *models.Communication, captureDevice *capture.Capture) {
var tracer = otel.Tracer("github.com/kerberos-io/agent/machinery/src/components")
func Bootstrap(ctx context.Context, configDirectory string, configuration *models.Configuration, communication *models.Communication, captureDevice *capture.Capture) {
log.Log.Debug("components.Kerberos.Bootstrap(): bootstrapping the kerberos agent.")
bootstrapContext := context.Background()
_, span := tracer.Start(bootstrapContext, "Bootstrap")
// We will keep track of the Kerberos Agent up time
// This is send to Kerberos Hub in a heartbeat.
uptimeStart := time.Now()
@@ -78,6 +85,8 @@ func Bootstrap(configDirectory string, configuration *models.Configuration, comm
// Configure a MQTT client which helps for a bi-directional communication
mqttClient := routers.ConfigureMQTT(configDirectory, configuration, communication)
span.End()
// Run the agent and fire up all the other
// goroutines which do image capture, motion detection, onvif, etc.
for {
@@ -114,6 +123,9 @@ func Bootstrap(configDirectory string, configuration *models.Configuration, comm
func RunAgent(configDirectory string, configuration *models.Configuration, communication *models.Communication, mqttClient mqtt.Client, uptimeStart time.Time, cameraSettings *models.Camera, captureDevice *capture.Capture) string {
ctx := context.Background()
ctxRunAgent, span := tracer.Start(ctx, "RunAgent")
log.Log.Info("components.Kerberos.RunAgent(): Creating camera and processing threads.")
config := configuration.Config
@@ -124,10 +136,10 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
rtspUrl := config.Capture.IPCamera.RTSP
rtspClient := captureDevice.SetMainClient(rtspUrl)
if rtspUrl != "" {
err := rtspClient.Connect(context.Background())
err := rtspClient.Connect(ctx, ctxRunAgent)
if err != nil {
log.Log.Error("components.Kerberos.RunAgent(): error connecting to RTSP stream: " + err.Error())
rtspClient.Close()
rtspClient.Close(ctxRunAgent)
rtspClient = nil
time.Sleep(time.Second * 3)
return status
@@ -145,7 +157,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
videoStreams, err := rtspClient.GetVideoStreams()
if err != nil || len(videoStreams) == 0 {
log.Log.Error("components.Kerberos.RunAgent(): no video stream found, might be the wrong codec (we only support H264 for the moment)")
rtspClient.Close()
rtspClient.Close(ctxRunAgent)
time.Sleep(time.Second * 3)
return status
}
@@ -161,6 +173,12 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
configuration.Config.Capture.IPCamera.Width = width
configuration.Config.Capture.IPCamera.Height = height
// Set the SPS and PPS values in the configuration.
configuration.Config.Capture.IPCamera.SPSNALUs = [][]byte{videoStream.SPS}
configuration.Config.Capture.IPCamera.PPSNALUs = [][]byte{videoStream.PPS}
configuration.Config.Capture.IPCamera.VPSNALUs = [][]byte{videoStream.VPS}
// Define queues for the main and sub stream.
var queue *packets.Queue
var subQueue *packets.Queue
@@ -182,7 +200,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
rtspSubClient := captureDevice.SetSubClient(subRtspUrl)
captureDevice.RTSPSubClient = rtspSubClient
err := rtspSubClient.Connect(context.Background())
err := rtspSubClient.Connect(ctx, ctxRunAgent)
if err != nil {
log.Log.Error("components.Kerberos.RunAgent(): error connecting to RTSP sub stream: " + err.Error())
time.Sleep(time.Second * 3)
@@ -194,7 +212,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
videoSubStreams, err = rtspSubClient.GetVideoStreams()
if err != nil || len(videoSubStreams) == 0 {
log.Log.Error("components.Kerberos.RunAgent(): no video sub stream found, might be the wrong codec (we only support H264 for the moment)")
rtspSubClient.Close()
rtspSubClient.Close(ctxRunAgent)
time.Sleep(time.Second * 3)
return status
}
@@ -217,28 +235,28 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
// Set the maximum GOP count, this is used to determine the pre-recording time.
log.Log.Info("components.Kerberos.RunAgent(): SetMaxGopCount was set with: " + strconv.Itoa(int(config.Capture.PreRecording)+1))
queue.SetMaxGopCount(int(config.Capture.PreRecording) + 1) // GOP time frame is set to prerecording (we'll add 2 gops to leave some room).
queue.SetMaxGopCount(1) // We will adjust this later on, when we have the GOP size.
queue.WriteHeader(videoStreams)
go rtspClient.Start(context.Background(), "main", queue, configuration, communication)
go rtspClient.Start(ctx, "main", queue, configuration, communication)
// Main stream is connected and ready to go.
communication.MainStreamConnected = true
// Try to create backchannel
rtspBackChannelClient := captureDevice.SetBackChannelClient(rtspUrl)
err = rtspBackChannelClient.ConnectBackChannel(context.Background())
err = rtspBackChannelClient.ConnectBackChannel(ctx, ctxRunAgent)
if err == nil {
log.Log.Info("components.Kerberos.RunAgent(): opened RTSP backchannel stream: " + rtspUrl)
go rtspBackChannelClient.StartBackChannel(context.Background())
go rtspBackChannelClient.StartBackChannel(ctx, ctxRunAgent)
}
rtspSubClient := captureDevice.RTSPSubClient
if subStreamEnabled && rtspSubClient != nil {
subQueue = packets.NewQueue()
communication.SubQueue = subQueue
subQueue.SetMaxGopCount(3) // GOP time frame is set to prerecording (we'll add 2 gops to leave some room).
subQueue.SetMaxGopCount(1) // GOP time frame is set to 1 for motion detection and livestreaming.
subQueue.WriteHeader(videoSubStreams)
go rtspSubClient.Start(context.Background(), "sub", subQueue, configuration, communication)
go rtspSubClient.Start(ctx, "sub", subQueue, configuration, communication)
// Sub stream is connected and ready to go.
communication.SubStreamConnected = true
@@ -254,7 +272,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
}
// Handle livestream HD (high resolution over WEBRTC)
communication.HandleLiveHDHandshake = make(chan models.RequestHDStreamPayload, 1)
communication.HandleLiveHDHandshake = make(chan models.RequestHDStreamPayload, 10)
if subStreamEnabled {
livestreamHDCursor := subQueue.Latest()
go cloud.HandleLiveStreamHD(livestreamHDCursor, configuration, communication, mqttClient, rtspSubClient)
@@ -267,7 +285,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
go capture.HandleRecordStream(queue, configDirectory, configuration, communication, rtspClient)
// Handle processing of motion
communication.HandleMotion = make(chan models.MotionDataPartial, 1)
communication.HandleMotion = make(chan models.MotionDataPartial, 10)
if subStreamEnabled {
motionCursor := subQueue.Latest()
go computervision.ProcessMotion(motionCursor, configuration, communication, mqttClient, rtspSubClient)
@@ -289,10 +307,10 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
go cloud.HandleUpload(configDirectory, configuration, communication)
// Handle ONVIF actions
communication.HandleONVIF = make(chan models.OnvifAction, 1)
communication.HandleONVIF = make(chan models.OnvifAction, 10)
go onvif.HandleONVIFActions(configuration, communication)
communication.HandleAudio = make(chan models.AudioDataPartial, 1)
communication.HandleAudio = make(chan models.AudioDataPartial, 10)
if rtspBackChannelClient.HasBackChannel {
communication.HasBackChannel = true
go WriteAudioToBackchannel(communication, rtspBackChannelClient)
@@ -301,6 +319,9 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
// If we reach this point, we have a working RTSP connection.
communication.CameraConnected = true
// Otel end span
span.End()
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// This will go into a blocking state, once this channel is triggered
// the agent will cleanup and restart.
@@ -344,7 +365,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
time.Sleep(time.Second * 3)
err = rtspClient.Close()
err = rtspClient.Close(ctxRunAgent)
if err != nil {
log.Log.Error("components.Kerberos.RunAgent(): error closing RTSP stream: " + err.Error())
time.Sleep(time.Second * 3)
@@ -356,7 +377,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
communication.Queue = nil
if subStreamEnabled {
err = rtspSubClient.Close()
err = rtspSubClient.Close(ctxRunAgent)
if err != nil {
log.Log.Error("components.Kerberos.RunAgent(): error closing RTSP sub stream: " + err.Error())
time.Sleep(time.Second * 3)
@@ -367,7 +388,7 @@ func RunAgent(configDirectory string, configuration *models.Configuration, commu
communication.SubQueue = nil
}
err = rtspBackChannelClient.Close()
err = rtspBackChannelClient.Close(ctxRunAgent)
if err != nil {
log.Log.Error("components.Kerberos.RunAgent(): error closing RTSP backchannel stream: " + err.Error())
}

View File

@@ -21,6 +21,7 @@ func ProcessMotion(motionCursor *packets.QueueCursor, configuration *models.Conf
var isPixelChangeThresholdReached = false
var changesToReturn = 0
var motionRectangle models.MotionRectangle
pixelThreshold := config.Capture.PixelChangeThreshold
// Might not be set in the config file, so set it to 150
@@ -132,7 +133,7 @@ func ProcessMotion(motionCursor *packets.QueueCursor, configuration *models.Conf
if detectMotion {
// Remember additional information about the result of findmotion
isPixelChangeThresholdReached, changesToReturn = FindMotion(imageArray, coordinatesToCheck, pixelThreshold)
isPixelChangeThresholdReached, changesToReturn, motionRectangle = FindMotion(imageArray, coordinatesToCheck, pixelThreshold)
if isPixelChangeThresholdReached {
// If offline mode is disabled, send a message to the hub
@@ -164,6 +165,7 @@ func ProcessMotion(motionCursor *packets.QueueCursor, configuration *models.Conf
dataToPass := models.MotionDataPartial{
Timestamp: time.Now().Unix(),
NumberOfChanges: changesToReturn,
Rectangle: motionRectangle,
}
communication.HandleMotion <- dataToPass //Save data to the channel
}
@@ -185,24 +187,58 @@ func ProcessMotion(motionCursor *packets.QueueCursor, configuration *models.Conf
log.Log.Debug("computervision.main.ProcessMotion(): stop the motion detection.")
}
func FindMotion(imageArray [3]*image.Gray, coordinatesToCheck []int, pixelChangeThreshold int) (thresholdReached bool, changesDetected int) {
func FindMotion(imageArray [3]*image.Gray, coordinatesToCheck []int, pixelChangeThreshold int) (thresholdReached bool, changesDetected int, motionRectangle models.MotionRectangle) {
image1 := imageArray[0]
image2 := imageArray[1]
image3 := imageArray[2]
threshold := 60
changes := AbsDiffBitwiseAndThreshold(image1, image2, image3, threshold, coordinatesToCheck)
return changes > pixelChangeThreshold, changes
changes, motionRectangle := AbsDiffBitwiseAndThreshold(image1, image2, image3, threshold, coordinatesToCheck)
return changes > pixelChangeThreshold, changes, motionRectangle
}
func AbsDiffBitwiseAndThreshold(img1 *image.Gray, img2 *image.Gray, img3 *image.Gray, threshold int, coordinatesToCheck []int) int {
func AbsDiffBitwiseAndThreshold(img1 *image.Gray, img2 *image.Gray, img3 *image.Gray, threshold int, coordinatesToCheck []int) (int, models.MotionRectangle) {
changes := 0
var pixelList [][]int
for i := 0; i < len(coordinatesToCheck); i++ {
pixel := coordinatesToCheck[i]
diff := int(img3.Pix[pixel]) - int(img1.Pix[pixel])
diff2 := int(img3.Pix[pixel]) - int(img2.Pix[pixel])
if (diff > threshold || diff < -threshold) && (diff2 > threshold || diff2 < -threshold) {
changes++
// Store the pixel coordinates where the change is detected
pixelList = append(pixelList, []int{pixel % img1.Bounds().Dx(), pixel / img1.Bounds().Dx()})
}
}
return changes
// Calculate rectangle of pixelList (startX, startY, endX, endY)
var motionRectangle models.MotionRectangle
if len(pixelList) > 0 {
startX := pixelList[0][0]
startY := pixelList[0][1]
endX := startX
endY := startY
for _, pixel := range pixelList {
if pixel[0] < startX {
startX = pixel[0]
}
if pixel[1] < startY {
startY = pixel[1]
}
if pixel[0] > endX {
endX = pixel[0]
}
if pixel[1] > endY {
endY = pixel[1]
}
}
log.Log.Debugf("Rectangle of changes detected: startX: %d, startY: %d, endX: %d, endY: %d", startX, startY, endX, endY)
motionRectangle = models.MotionRectangle{
X: startX,
Y: startY,
Width: endX - startX,
Height: endY - startY,
}
log.Log.Debugf("Motion rectangle: %+v", motionRectangle)
}
return changes, motionRectangle
}

View File

@@ -135,6 +135,12 @@ func OpenConfig(configDirectory string, configuration *models.Configuration) {
conjungo.Merge(&kerberosvault, configuration.CustomConfig.KStorage, opts)
configuration.Config.KStorage = &kerberosvault
// Merge Secondary Kerberos Vault settings
var kerberosvaultSecondary models.KStorage
conjungo.Merge(&kerberosvaultSecondary, configuration.GlobalConfig.KStorageSecondary, opts)
conjungo.Merge(&kerberosvaultSecondary, configuration.CustomConfig.KStorageSecondary, opts)
configuration.Config.KStorageSecondary = &kerberosvaultSecondary
// Merge Kerberos S3 settings
var s3 models.S3
conjungo.Merge(&s3, configuration.GlobalConfig.S3, opts)
@@ -183,15 +189,19 @@ func OpenConfig(configDirectory string, configuration *models.Configuration) {
}
jsonFile.Close()
}
}
return
}
// This function will override the configuration with environment variables.
func OverrideWithEnvironmentVariables(configuration *models.Configuration) {
environmentVariables := os.Environ()
// Initialize the configuration for some new fields.
if configuration.Config.KStorageSecondary == nil {
configuration.Config.KStorageSecondary = &models.KStorage{}
}
for _, env := range environmentVariables {
if strings.Contains(env, "AGENT_") {
key := strings.Split(env, "=")[0]
@@ -436,7 +446,7 @@ func OverrideWithEnvironmentVariables(configuration *models.Configuration) {
configuration.Config.S3.Region = value
break
/* When storing in a Kerberos Vault */
/* When storing in a Vault */
case "AGENT_KERBEROSVAULT_URI":
configuration.Config.KStorage.URI = value
break
@@ -453,6 +463,37 @@ func OverrideWithEnvironmentVariables(configuration *models.Configuration) {
configuration.Config.KStorage.Directory = value
break
/* Retry policy and timeout */
case "AGENT_KERBEROSVAULT_MAX_RETRIES":
maxRetries, err := strconv.Atoi(value)
if err == nil {
configuration.Config.KStorage.MaxRetries = maxRetries
}
break
case "AGENT_KERBEROSVAULT_TIMEOUT":
timeout, err := strconv.Atoi(value)
if err == nil {
configuration.Config.KStorage.Timeout = timeout
}
break
/* When storing in a secondary Vault */
case "AGENT_KERBEROSVAULT_SECONDARY_URI":
configuration.Config.KStorageSecondary.URI = value
break
case "AGENT_KERBEROSVAULT_SECONDARY_ACCESS_KEY":
configuration.Config.KStorageSecondary.AccessKey = value
break
case "AGENT_KERBEROSVAULT_SECONDARY_SECRET_KEY":
configuration.Config.KStorageSecondary.SecretAccessKey = value
break
case "AGENT_KERBEROSVAULT_SECONDARY_PROVIDER":
configuration.Config.KStorageSecondary.Provider = value
break
case "AGENT_KERBEROSVAULT_SECONDARY_DIRECTORY":
configuration.Config.KStorageSecondary.Directory = value
break
/* When storing in dropbox */
case "AGENT_DROPBOX_ACCESS_TOKEN":
configuration.Config.Dropbox.AccessToken = value
@@ -478,9 +519,26 @@ func OverrideWithEnvironmentVariables(configuration *models.Configuration) {
case "AGENT_ENCRYPTION_SYMMETRIC_KEY":
configuration.Config.Encryption.SymmetricKey = value
break
/* When signing is enabled */
case "AGENT_SIGNING":
configuration.Config.Signing.Enabled = value
break
case "AGENT_SIGNING_PRIVATE_KEY":
signingPrivateKey := strings.ReplaceAll(value, "\\n", "\n")
configuration.Config.Signing.PrivateKey = signingPrivateKey
break
}
}
}
// Signing is a new feature, so if empty we set default values.
if configuration.Config.Signing == nil || configuration.Config.Signing.PrivateKey == "" {
configuration.Config.Signing = &models.Signing{
Enabled: "true",
PrivateKey: "-----BEGIN PRIVATE KEY-----\nMIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQDoSxjyw08lRxF4Yoqmcaewjq3XjB55dMy4tlN5MGLdr8aAPuNR9Mwh3jlh1bDpwQXNgZkHDV/q9bpdPGGi7SQo2xw+rDuo5Y1f3wdzz+iuCTPbzoGFalE+1PZlU5TEtUtlbt7MRc4pxTaLP3u0P3EtW3KnzcUarcJWZJYxzv7gqVNCA/47BN+1ptqjwz3LAlah5yaftEvVjkaANOsafUswbS4VT44XfSlbKgebORCKDuNgQiyhuV5gU+J0TOaqRWwwMAWV0UoScyJLfhHRBCrUwrCUTwqH9jfkB7pgRFsYoZJd4MKMeHJjFSum+QXCBqInSnwu8c2kJChiLMWqJ+mhpTdfUAmSkeUSStfbbcavIPbDABvMgzOcmYMIVXXe57twU0xdu3AqWLtc9kw1BkUgZblM9pSSpYrIDheEyMs2/hiLgXsIaM0nVQtqwrA7rbeEGuPblzA6hvHgwN9K6HaBqdlGSlpYZ0v3SWIMwmxRB+kIojlyuggm8Qa4mqL97GFDGl6gOBGlNUFTBUVEa3EaJ7NJpGobRGsh/9dXzcW4aYmT9WxlzTlIKksI1ro6KdRfuVWfEs4AnG8bVEJmofK8EUrueB9IdXlcJZB49xolnOZPFohtMe/0U7evQOQP3sZnX+KotCsE7OXJvL09oF58JKoqmK9lPp0+pFBU4g6NjQIDAQABAoICAA+RSWph1t+q5R3nxUxFTYMrhv5IjQe2mDxJpF3B409zolC9OHxgGUisobTY3pBqs0DtKbxUeH2A0ehUH/axEosWHcz3cmIbgxHE9kdlJ9B3Lmss6j/uw+PWutu1sgm5phaIFIvuNNRWhPB6yXUwU4sLRat1+Z9vTmIQiKdtLIrtJz/n2VDvrJxn1N+yAsE20fnrksFKyZuxVsJaZPiX/t5Yv1/z0LjFjVoL7GUA5/Si7csN4ftqEhUrkNr2BvcZlTyffrF4lZCXrtl76RNUaxhqIu3H0gFbV2UfBpuckkfAhNRpXJ4iFSxm4nQbk4ojV8+l21RFOBeDN2Z7Ocu6auP5MnzpopR66vmDCmPoid498VGgDzFQEVkOar8WAa4v9h85QgLKrth6FunmaWJUT6OggQD3yY58GSwp5+ARMETMBP2x6Eld+PGgqoJvPT1+l/e9gOw7/SJ+Wz6hRXZAm/eiXMppHtB7sfea5rscNanPjJkK9NvPM0MX9cq/iA6QjXuETkMbubjo+Cxk3ydZiIQmWQDAx/OgxTyHbeRCVhLPcAphX0clykCuHZpI9Mvvj643/LoE0mjTByWJXf/WuGJA8ElHkjSdokVJ7jumz8OZZHfq0+V7+la2opsObeQANHW5MLWrnHlRVzTGV0IRZDXh7h1ptUJ4ubdvw/GJ2NeTAoIBAQD0lXXdjYKWC4uZ4YlgydP8b1CGda9cBV5RcPt7q9Ya1R2E4ieYyohmzltopvdaOXdsTZzhtdzOzKF+2qNcbBKhBTleYZ8GN5RKbo7HwXWpzfCTjseKHOD/QPwvBKXzLVWNtXn1NrLR79Rv0wbkYF6DtoqpEPf5kMs4bx79yW+mz8FUgdEeMjKphx6Jd5RYlTUxS64K6bnK7gjHNCF2cwdxsh4B6EB649GKeNz4JXi+oQBmOcX5ncXnkJrbju+IjtCkQ40HINVNdX7XeEaaw6KGaImVjw61toPUuDaioYUojufayoyXaUJnDbHQ2tNekEpq5iwnenZCbUKWmSeRe7dLAoIBAQDzIscYujsrmPxiTj2prhG0v36NRNP99mShnnJGowiIs+UBS0EMdOmBFa2sC9uFs/VnreQNYPDJdfr7O5VK9kfbH/PSiiKJ+wVebfdAlWkJYH27JN2Kl2l/OsvRVelNvF3BWIYF46qzGxIM0axaz3T2ZAJ9SrUgeAYhak6uyM4fbexEWX
xDgPGu6C0jB6IAzmHJnnh+j5+4ZXqjVyUxBYtUsWXF/TXomVcT9jxj7aUmS2/Us0XTVOVNpALqqYcekrzsX/wX0OEi5HkivYXHcNaDHx3NuUf6KdYof5DwPUM76qe+5/kWlSIHP3M6rIFK3pYFUnkHn2E8jNWcO97Aio+HAoIBAA+bcff/TbPxbKkXIUMR3fsfx02tONFwbkJYKVQM9Q6lRsrx+4Dee7HDvUWCUgpp3FsG4NnuVvbDTBLiNMZzBwVLZgvFwvYMmePeBjJs/+sj/xQLamQ/z4O6S91cOJK589mlGPEy2lpXKYExQCFWnPFetp5vPMOqH62sOZgMQJmubDHOTt/UaDM1Mhenj8nPS6OnpqV/oKF4awr7Ip+CW5k/unZ4sZSl8PsbF06mZXwUngfn6+Av1y8dpSQZjONz6ZBx1w/7YmEc/EkXnbnGfhqBlTX7+P5TdTofvyzFjc+2vsjRYANRbjFRSGWBcTd5kaYcpfim8eDvQ+6EO2gnMt0CggEAH2ln1Y8B5AEQ4lZ/avOdP//ZhsDUrqPtnl/NHckkahzrwj4JumVEYbP+SxMBGoYEd4+kvgG/OhfvBBRPlm65G9tF8fZ8vdzbdba5UfO7rUV1GP+LS8OCErjy6imySaPDbR5Vul8Oh7NAor1YCidxUf/bvnovanF3QUvtvHEfCDp4YuA4yLPZBaLjaforePUw9w5tPNSravRZYs74dBvmQ1vj7S9ojpN5B5AxfyuNwaPPX+iFZec69MvywISEe3Ozysof1Kfc3lgsOkvIA9tVK32SqSh93xkWnQbWH+OaUxxe7bAko0FDMzKEXZk53wVg1nEwR8bUljEPy+6EOdXs8wKCAQEAsEOWYMY5m7HkeG2XTTvX7ECmmdGl/c4ZDVwzB4IPxqUG7XfLmtsON8YoKOEUpJoc4ANafLXzmU+esUGbH4Ph22IWgP9jzws7jxaN/Zoku64qrSjgEZFTRIpKyhFk/ImWbS9laBW4l+m0tqTTRqoE0QEJf/2uv/04q65zrA70X9z2+KTrAtqOiRQPWl/IxRe9U4OEeGL+oD+YlXKCDsnJ3rwUIOZgJx0HWZg7K35DKwqs1nVi56FBdljiTRKAjVLRedjgDCSfGS1yUZ3krHzpaPt1qgnT3rdtYcIdbYDr66V2/gEEaz6XMGHuTk/ewjzUJxq9UTVeXOCbkRPXgVJg1w==\n-----END PRIVATE KEY-----",
}
}
}
func SaveConfig(configDirectory string, config models.Config, configuration *models.Configuration, communication *models.Communication) error {

View File

@@ -138,6 +138,16 @@ func (self *Logging) Debug(sentence string) {
}
}
// Debugf logs a printf-style formatted message at debug level through the
// configured logging backend. Supported backends are "go-logging" and
// "logrus"; any other value of self.Logger silently discards the message.
func (self *Logging) Debugf(format string, args ...interface{}) {
	switch self.Logger {
	case "go-logging":
		gologging.Debugf(format, args...)
	case "logrus":
		logrus.Debugf(format, args...)
	default:
		// Unknown backend: intentionally a no-op (matches Debug/Error style).
	}
}
func (self *Logging) Error(sentence string) {
switch self.Logger {
case "go-logging":

View File

@@ -28,6 +28,7 @@ type Config struct {
Cloud string `json:"cloud" bson:"cloud"`
S3 *S3 `json:"s3,omitempty" bson:"s3,omitempty"`
KStorage *KStorage `json:"kstorage,omitempty" bson:"kstorage,omitempty"`
KStorageSecondary *KStorage `json:"kstorage_secondary,omitempty" bson:"kstorage_secondary,omitempty"`
Dropbox *Dropbox `json:"dropbox,omitempty" bson:"dropbox,omitempty"`
MQTTURI string `json:"mqtturi" bson:"mqtturi,omitempty"`
MQTTUsername string `json:"mqtt_username" bson:"mqtt_username"`
@@ -45,6 +46,7 @@ type Config struct {
HubSite string `json:"hub_site" bson:"hub_site"`
ConditionURI string `json:"condition_uri" bson:"condition_uri"`
Encryption *Encryption `json:"encryption,omitempty" bson:"encryption,omitempty"`
Signing *Signing `json:"signing,omitempty" bson:"signing,omitempty"`
RealtimeProcessing string `json:"realtimeprocessing,omitempty" bson:"realtimeprocessing,omitempty"`
RealtimeProcessingTopic string `json:"realtimeprocessing_topic" bson:"realtimeprocessing_topic"`
}
@@ -63,6 +65,7 @@ type Capture struct {
Continuous string `json:"continuous,omitempty"`
PostRecording int64 `json:"postrecording"`
PreRecording int64 `json:"prerecording"`
GopSize int `json:"gopsize,omitempty" bson:"gopsize,omitempty"` // GOP size in seconds, used for pre-recording
MaxLengthRecording int64 `json:"maxlengthrecording"`
TranscodingWebRTC string `json:"transcodingwebrtc"`
TranscodingResolution int64 `json:"transcodingresolution"`
@@ -75,18 +78,23 @@ type Capture struct {
// IPCamera configuration, such as the RTSP url of the IPCamera and the FPS.
// Also includes ONVIF integration
type IPCamera struct {
RTSP string `json:"rtsp"`
Width int `json:"width"`
Height int `json:"height"`
FPS string `json:"fps"`
SubRTSP string `json:"sub_rtsp"`
SubWidth int `json:"sub_width"`
SubHeight int `json:"sub_height"`
SubFPS string `json:"sub_fps"`
ONVIF string `json:"onvif,omitempty" bson:"onvif"`
ONVIFXAddr string `json:"onvif_xaddr" bson:"onvif_xaddr"`
ONVIFUsername string `json:"onvif_username" bson:"onvif_username"`
ONVIFPassword string `json:"onvif_password" bson:"onvif_password"`
RTSP string `json:"rtsp"`
Width int `json:"width"`
Height int `json:"height"`
FPS string `json:"fps"`
SubRTSP string `json:"sub_rtsp"`
SubWidth int `json:"sub_width"`
SubHeight int `json:"sub_height"`
SubFPS string `json:"sub_fps"`
ONVIF string `json:"onvif,omitempty" bson:"onvif"`
ONVIFXAddr string `json:"onvif_xaddr" bson:"onvif_xaddr"`
ONVIFUsername string `json:"onvif_username" bson:"onvif_username"`
ONVIFPassword string `json:"onvif_password" bson:"onvif_password"`
SPSNALUs [][]byte `json:"sps_nalus,omitempty" bson:"sps_nalus,omitempty"`
PPSNALUs [][]byte `json:"pps_nalus,omitempty" bson:"pps_nalus,omitempty"`
VPSNALUs [][]byte `json:"vps_nalus,omitempty" bson:"vps_nalus,omitempty"`
SampleRate int `json:"sample_rate,omitempty" bson:"sample_rate,omitempty"`
Channels int `json:"channels,omitempty" bson:"channels,omitempty"`
}
// USBCamera configuration, such as the device path (/dev/video*)
@@ -158,6 +166,8 @@ type KStorage struct {
SecretAccessKey string `json:"secret_access_key,omitempty" bson:"secret_access_key,omitempty"`
Provider string `json:"provider,omitempty" bson:"provider,omitempty"`
Directory string `json:"directory,omitempty" bson:"directory,omitempty"`
MaxRetries int `json:"max_retries,omitempty" bson:"max_retries,omitempty"`
Timeout int `json:"timeout,omitempty" bson:"timeout,omitempty"`
}
// Dropbox integration
@@ -174,3 +184,9 @@ type Encryption struct {
PrivateKey string `json:"private_key" bson:"private_key"`
SymmetricKey string `json:"symmetric_key" bson:"symmetric_key"`
}
// Signing holds the recording-signing configuration: Enabled is a string
// flag ("true"/"false" — stored as a string like the other config toggles),
// and PrivateKey is the PEM-encoded private key used to sign recordings.
type Signing struct {
	Enabled    string `json:"enabled" bson:"enabled"`
	PrivateKey string `json:"private_key" bson:"private_key"`
}

View File

@@ -1,8 +1,9 @@
package models
type MotionDataPartial struct {
Timestamp int64 `json:"timestamp" bson:"timestamp"`
NumberOfChanges int `json:"numberOfChanges" bson:"numberOfChanges"`
Timestamp int64 `json:"timestamp" bson:"timestamp"`
NumberOfChanges int `json:"numberOfChanges" bson:"numberOfChanges"`
Rectangle MotionRectangle `json:"rectangle" bson:"rectangle"`
}
type MotionDataFull struct {
@@ -14,3 +15,10 @@ type MotionDataFull struct {
NumberOfChanges int `json:"numberOfChanges" bson:"numberOfChanges"`
Token int `json:"token" bson:"token"`
}
// MotionRectangle is the bounding box, in pixel coordinates of the analyzed
// image, around all pixels that changed during motion detection.
// X/Y is the top-left corner; Width/Height span to the bottom-right corner.
type MotionRectangle struct {
	X      int `json:"x" bson:"x"`
	Y      int `json:"y" bson:"y"`
	Width  int `json:"width" bson:"width"`
	Height int `json:"height" bson:"height"`
}

View File

@@ -18,4 +18,5 @@ type Packet struct {
Time int64 // packet decode time
TimeLegacy time.Duration
Data []byte // packet data
Gopsize int // size of the GOP
}

View File

@@ -45,6 +45,11 @@ func (self *Queue) SetMaxGopCount(n int) {
return
}
// GetMaxGopCount returns the maximum number of GOPs (groups of pictures)
// that this queue is configured to retain.
func (self *Queue) GetMaxGopCount() int {
	return self.maxgopcount
}
func (self *Queue) WriteHeader(streams []Stream) error {
self.lock.Lock()

View File

@@ -1,6 +1,9 @@
package packets
type Stream struct {
// The ID of the stream.
Index int `json:"index" bson:"index"`
// The name of the stream.
Name string
@@ -39,4 +42,13 @@ type Stream struct {
// IsBackChannel is true if this stream is a back channel.
IsBackChannel bool
// SampleRate is the sample rate of the audio stream.
SampleRate int
// Channels is the number of audio channels.
Channels int
// GopSize is the size of the GOP (Group of Pictures).
GopSize int
}

View File

@@ -54,16 +54,21 @@ func AddRoutes(r *gin.Engine, authMiddleware *jwt.GinJWTMiddleware, configDirect
components.UpdateConfig(c, configDirectory, configuration, communication)
})
// Will verify the current hub settings.
// Will verify the hub settings.
api.POST("/hub/verify", func(c *gin.Context) {
cloud.VerifyHub(c)
})
// Will verify the current persistence settings.
// Will verify the persistence settings.
api.POST("/persistence/verify", func(c *gin.Context) {
cloud.VerifyPersistence(c, configDirectory)
})
// Will verify the secondary persistence settings.
api.POST("/persistence/secondary/verify", func(c *gin.Context) {
cloud.VerifySecondaryPersistence(c, configDirectory)
})
// Camera specific methods. Doesn't require any authorization.
// These are available for anyone, but require the agent, to reach
// the camera.

View File

@@ -391,7 +391,8 @@ func HandleRequestConfig(mqttClient mqtt.Client, hubKey string, payload models.P
// We need a fix for the width and height if a substream.
// The ROI requires the width and height of the sub stream.
if configuration.Config.Capture.IPCamera.SubRTSP != "" {
if configuration.Config.Capture.IPCamera.SubRTSP != "" &&
configuration.Config.Capture.IPCamera.SubRTSP != configuration.Config.Capture.IPCamera.RTSP {
deepCopy.Capture.IPCamera.Width = configuration.Config.Capture.IPCamera.SubWidth
deepCopy.Capture.IPCamera.Height = configuration.Config.Capture.IPCamera.SubHeight
}

992
machinery/src/video/mp4.go Normal file
View File

@@ -0,0 +1,992 @@
package video
import (
"bufio"
"bytes"
"crypto/rsa"
"crypto/x509"
"encoding/pem"
"errors"
"fmt"
"io/ioutil"
"os"
"strings"
"time"
mp4ff "github.com/Eyevinn/mp4ff/mp4"
"github.com/kerberos-io/agent/machinery/src/encryption"
"github.com/kerberos-io/agent/machinery/src/log"
"github.com/kerberos-io/agent/machinery/src/models"
"github.com/kerberos-io/agent/machinery/src/utils"
)
// LastPTS is package-level state recording the last presentation timestamp
// seen for the current segment. NOTE(review): shared across all MP4
// instances in the process — confirm only one recording is active at a time.
var LastPTS uint64 = 0 // Last PTS for the current segment
// MP4 incrementally writes a fragmented MP4 file: NewMP4 reserves a free box
// at the start of the file, AddSampleToTrack appends moof/mdat media
// segments, and Close writes the real ftyp/moov init segment back over the
// reserved free box.
type MP4 struct {
	// FileName is the name of the file
	FileName           string
	width              int                   // video width in pixels (set via SetWidth)
	height             int                   // video height in pixels (set via SetHeight)
	Segments           []*mp4ff.MediaSegment // List of media segments
	Segment            *mp4ff.MediaSegment   // Segment currently being filled
	MultiTrackFragment *mp4ff.Fragment       // Fragment of the current segment (all tracks)
	TrackIDs           []uint32              // All registered track IDs, in order of creation
	FileWriter         *os.File              // Underlying output file
	Writer             *bufio.Writer         // Buffered writer wrapping FileWriter
	SegmentCount       int                   // Number of segments started so far (fragment sequence number)
	SampleCount        int
	StartPTS           uint64 // PTS at which the current segment started
	VideoTotalDuration uint64 // Accumulated video duration (track timescale units)
	AudioTotalDuration uint64 // Accumulated audio duration (track timescale units)
	AudioPTS           uint64 // Running audio decode time
	Start              bool   // True once the first keyframe has been seen
	SPSNALUs           [][]byte // SPS NALUs for H264/H265
	PPSNALUs           [][]byte // PPS NALUs for H264/H265
	VPSNALUs           [][]byte // VPS NALUs (H265 only; H264 has no VPS)
	FreeBoxSize        int64    // Size of the reserved free box at the start of the file
	MoofBoxes          int64    // Number of moof boxes in the file
	MoofBoxSizes       []int64  // Sizes of each moof box
	StartTime          uint64   // Start time of the MP4 file (Unix seconds)
	VideoTrackName     string   // Name of the video track codec (e.g. "H264", "H265")
	VideoTrack         int      // Track ID for the video track
	AudioTrackName     string   // Name of the audio track codec (e.g. "AAC")
	AudioTrack         int      // Track ID for the audio track
	VideoFullSample    *mp4ff.FullSample // Buffered video sample awaiting its duration (next PTS)
	AudioFullSample    *mp4ff.FullSample // Buffered audio sample awaiting its duration (next PTS)
	LastAudioSampleDTS uint64 // Duration of the last flushed audio sample (despite the DTS name)
	LastVideoSampleDTS uint64 // Duration of the last flushed video sample (despite the DTS name)
	SampleType         string // Type of the last buffered sample ("video" or "audio")
}
// NewMP4 creates a new MP4 writer for fileName. It reserves space for the
// init segment by writing a placeholder free box (freeBoxSize = 2048 bytes)
// at the start of the file; Close later overwrites that placeholder with the
// real ftyp/moov boxes. The SPS/PPS/VPS parameter-set NALUs are kept so the
// codec descriptors can be built on Close.
//
// NOTE: panics if the file cannot be created or the placeholder cannot be
// written — callers get no error return.
func NewMP4(fileName string, spsNALUs [][]byte, ppsNALUs [][]byte, vpsNALUs [][]byte) *MP4 {
	init := mp4ff.NewMP4Init()
	// Prepend a free box of freeBoxSize (2048) bytes to the init segment;
	// this reserves room at the start of the file for the final moov box.
	freeBoxSize := 2048
	free := mp4ff.NewFreeBox(make([]byte, freeBoxSize))
	init.AddChild(free)
	// Create a writer
	ofd, err := os.Create(fileName)
	if err != nil {
		panic(err)
	}
	// Create a buffered writer
	bufferedWriter := bufio.NewWriterSize(ofd, 64*1024) // 64KB buffer
	// We will write the empty init segment to the file
	// so we can overwrite it later with the actual init segment.
	err = init.Encode(bufferedWriter)
	if err != nil {
		panic(err)
	}
	return &MP4{
		FileName:    fileName,
		StartTime:   uint64(time.Now().Unix()),
		FreeBoxSize: int64(freeBoxSize),
		FileWriter:  ofd,
		Writer:      bufferedWriter,
		SPSNALUs:    spsNALUs,
		PPSNALUs:    ppsNALUs,
		VPSNALUs:    vpsNALUs,
	}
}
// SetWidth sets the width of the video in pixels.
func (mp4 *MP4) SetWidth(width int) {
	// Set the width of the video
	mp4.width = width
}
// SetHeight sets the height of the video in pixels.
func (mp4 *MP4) SetHeight(height int) {
	// Set the height of the video
	mp4.height = height
}
// AddVideoTrack registers a video track using the given codec name
// (e.g. "H264", "H265") and returns its track ID. Track IDs are assigned
// sequentially starting at 1.
func (mp4 *MP4) AddVideoTrack(codec string) uint32 {
	trackID := uint32(len(mp4.TrackIDs)) + 1
	mp4.TrackIDs = append(mp4.TrackIDs, trackID)
	mp4.VideoTrack = int(trackID)
	mp4.VideoTrackName = codec
	return trackID
}
// AddAudioTrack registers an audio track using the given codec name
// (e.g. "AAC") and returns its track ID. Track IDs are assigned
// sequentially starting at 1.
func (mp4 *MP4) AddAudioTrack(codec string) uint32 {
	trackID := uint32(len(mp4.TrackIDs)) + 1
	mp4.TrackIDs = append(mp4.TrackIDs, trackID)
	mp4.AudioTrack = int(trackID)
	mp4.AudioTrackName = codec
	return trackID
}
// AddMediaSegment is currently a no-op placeholder: segment creation is
// handled inside AddSampleToTrack whenever a keyframe starts a new segment.
func (mp4 *MP4) AddMediaSegment(segNr int) {
}
// AddSampleToTrack appends a media sample to the given track.
//
// Samples are buffered one behind: a sample's duration is only known once
// the NEXT sample's PTS arrives, so the previously buffered sample
// (VideoFullSample / AudioFullSample) is flushed into the fragment here and
// the incoming sample becomes the new buffered one.
//
// A keyframe closes the current media segment (encoding it to the writer)
// and starts a fresh segment/fragment. Nothing is written before the first
// keyframe has been seen (mp4.Start).
//
// Parameters:
//   - trackID: target track, compared against VideoTrack / AudioTrack
//   - isKeyframe: true when this sample starts a new GOP/segment
//   - data: Annex B access unit (video) or ADTS AAC frame(s) (audio)
//   - pts: presentation timestamp in track timescale units
//
// Returns an error when encoding a finished segment, creating a new
// fragment, or adding a video sample to the fragment fails.
func (mp4 *MP4) AddSampleToTrack(trackID uint32, isKeyframe bool, data []byte, pts uint64) error {
	if isKeyframe {
		// A keyframe marks a segment boundary: flush the finished segment.
		if mp4.Start {
			mp4.MoofBoxes = mp4.MoofBoxes + 1
			mp4.MoofBoxSizes = append(mp4.MoofBoxSizes, int64(mp4.Segment.Size()))
			err := mp4.Segment.Encode(mp4.Writer)
			if err != nil {
				return err
			}
			mp4.Segments = append(mp4.Segments, mp4.Segment)
		}
		mp4.Start = true
		// Increment the segment count
		mp4.SegmentCount = mp4.SegmentCount + 1
		// Create a new media segment holding one fragment that carries all tracks.
		seg := mp4ff.NewMediaSegment()
		multiTrackFragment, err := mp4ff.CreateMultiTrackFragment(uint32(mp4.SegmentCount), mp4.TrackIDs)
		if err != nil {
			// BUGFIX: this error used to be silently swallowed, leaving a nil
			// fragment that would crash on the AddFullSampleToTrack calls
			// below. Propagate it instead.
			return err
		}
		mp4.MultiTrackFragment = multiTrackFragment
		seg.AddFragment(multiTrackFragment)
		// Set to MP4 struct
		mp4.Segment = seg
		// Set the start PTS for the next segment
		mp4.StartPTS = pts
	}
	if mp4.Start {
		if trackID == uint32(mp4.VideoTrack) {
			// Convert Annex B start-code framing into MP4 length-prefixed NALUs.
			var lengthPrefixed []byte
			var err error
			if mp4.VideoTrackName == "H264" || mp4.VideoTrackName == "AVC1" { // Convert Annex B to length-prefixed NAL units if H264
				lengthPrefixed, err = annexBToLengthPrefixed(data)
			} else if mp4.VideoTrackName == "H265" || mp4.VideoTrackName == "HVC1" { // Convert H265 Annex B to length-prefixed NAL units
				lengthPrefixed, err = annexBToLengthPrefixed(data)
			}
			// NOTE(review): if the conversion fails, or the codec name is not
			// one of the four handled above, the sample is silently dropped —
			// confirm this is intended.
			if err == nil {
				if mp4.VideoFullSample != nil {
					// Flush the previously buffered video sample; its duration
					// is the PTS delta up to the current sample.
					duration := pts - mp4.VideoFullSample.DecodeTime
					log.Log.Debug("Adding sample to track " + fmt.Sprintf("%d, PTS: %d, Duration: %d, size: %d, Keyframe: %t", trackID, pts, duration, len(lengthPrefixed), isKeyframe))
					mp4.LastVideoSampleDTS = duration
					mp4.VideoTotalDuration += duration
					mp4.VideoFullSample.DecodeTime = mp4.VideoTotalDuration - duration
					mp4.VideoFullSample.Sample.Dur = uint32(duration)
					err := mp4.MultiTrackFragment.AddFullSampleToTrack(*mp4.VideoFullSample, trackID)
					if err != nil {
						return err
					}
				}
				// Buffer the current sample until the next PTS is known.
				var fullSample mp4ff.FullSample
				// trun sample flags: 0x02000000 (33554432) marks a sync
				// (keyframe) sample, 0x01010000 (16842752) a non-sync sample.
				flags := uint32(33554432)
				if !isKeyframe {
					flags = uint32(16842752)
				}
				fullSample.DecodeTime = pts
				fullSample.Data = lengthPrefixed
				fullSample.Sample = mp4ff.Sample{
					Size:                  uint32(len(fullSample.Data)),
					Flags:                 flags,
					CompositionTimeOffset: 0, // No composition time offset for video
				}
				mp4.VideoFullSample = &fullSample
				mp4.SampleType = "video"
			}
		} else if trackID == uint32(mp4.AudioTrack) {
			if mp4.AudioFullSample != nil {
				// The buffered audio payload may contain several ADTS frames;
				// split it and add each frame (ADTS header stripped) as a sample.
				SplitAACFrame(mp4.AudioFullSample.Data, func(started bool, aac []byte) {
					sampleToAdd := *mp4.AudioFullSample
					dts := pts - mp4.AudioFullSample.DecodeTime
					if pts < mp4.AudioFullSample.DecodeTime {
						// PTS went backwards: clamp to 1 to avoid a huge
						// unsigned wrap-around duration.
						dts = 1
					}
					if started {
						dts = 1
					}
					mp4.LastAudioSampleDTS = dts
					mp4.AudioTotalDuration += dts
					mp4.AudioPTS += dts
					sampleToAdd.Data = aac[7:] // Remove the ADTS header (first 7 bytes)
					sampleToAdd.DecodeTime = mp4.AudioPTS - dts
					sampleToAdd.Sample.Dur = uint32(dts)
					sampleToAdd.Sample.Size = uint32(len(aac[7:]))
					err := mp4.MultiTrackFragment.AddFullSampleToTrack(sampleToAdd, trackID)
					if err != nil {
						log.Log.Error("mp4.AddSampleToTrack(): error adding sample to track " + fmt.Sprintf("%d: %v", trackID, err))
					}
				})
			}
			// Buffer the current audio sample until the next PTS is known.
			var fullSample mp4ff.FullSample
			fullSample.DecodeTime = pts
			fullSample.Data = data
			fullSample.Sample = mp4ff.Sample{
				Size:                  uint32(len(fullSample.Data)),
				Flags:                 0,
				CompositionTimeOffset: 0, // No composition time offset for audio
			}
			mp4.AudioFullSample = &fullSample
			mp4.SampleType = "audio"
		}
	}
	return nil
}
// Close finalizes the MP4 file: it encodes the last open media segment,
// flushes the buffered writer, builds the real init segment (ftyp + moov,
// including codec descriptors, a signed fingerprint UUID box when a signing
// key is configured), and writes it back over the free box reserved by
// NewMP4. The underlying file is closed on return.
//
// NOTE: panics on write/encode failures instead of returning an error.
func (mp4 *MP4) Close(config *models.Config) {
	// Add the last sample to the track, we will predict the duration based on the last sample.
	// We do not insert the last sample as we might corrupt playback (we do not accurately know the next PTS).
	// In theory it means we will lose the last sample, so there is millisecond data loss, but that is better than corrupting playback.
	// We could fix this by using a delayed packet reader and looking for the next (closest) PTS, but that would require a lot of memory and CPU.
	/*duration := uint64(0)
	trackID := uint32(1)
	if mp4.SampleType == "video" {
		duration = mp4.LastVideoSampleDTS
		trackID = uint32(mp4.VideoTrack)
	} else if mp4.SampleType == "audio" {
		duration = 21 //mp4.LastAudioSampleDTS
	} else {
		log.Println("mp4.Close(): unknown sample type, cannot calculate duration")
	}
	if duration > 0 {
		mp4.VideoTotalDuration += duration
		mp4.VideoFullSample.DecodeTime = mp4.VideoTotalDuration - duration
		mp4.VideoFullSample.Sample.Dur = uint32(duration)
		err := mp4.MultiTrackFragment.AddFullSampleToTrack(*mp4.VideoFullSample, trackID)
		if err != nil {
		}
		mp4.Segments = append(mp4.Segments, mp4.Segment)
	}*/
	if mp4.VideoTotalDuration == 0 && mp4.AudioTotalDuration == 0 {
		// Logged but not fatal: finalization continues and produces an empty file.
		log.Log.Error("mp4.Close(): no video or audio samples added, cannot create MP4 file")
	}
	// Encode the last segment
	if mp4.Segment != nil {
		err := mp4.Segment.Encode(mp4.Writer)
		if err != nil {
			panic(err)
		}
	}
	mp4.Writer.Flush()
	defer mp4.FileWriter.Close()
	// Now we have all the moof and mdat boxes written to the file.
	// We can now generate the ftyp and moov boxes and write them over the free
	// box we reserved earlier (mp4.FreeBoxSize bytes, see NewMP4).
	init := mp4ff.NewMP4Init()
	// Create a new ftyp box
	majorBrand := "isom"
	minorVersion := uint32(512)
	compatibleBrands := []string{"iso2", "avc1", "hvc1", "mp41"}
	ftyp := mp4ff.NewFtyp(majorBrand, minorVersion, compatibleBrands)
	init.AddChild(ftyp)
	// Create a new moov box
	moov := mp4ff.NewMoovBox()
	init.AddChild(moov)
	// Set the creation time and modification time for the moov box
	videoTimescale := uint32(1000)
	audioTimescale := uint32(1000)
	mvhd := &mp4ff.MvhdBox{
		Version:          0,
		Flags:            0,
		CreationTime:     mp4.StartTime,
		ModificationTime: mp4.StartTime,
		Timescale:        videoTimescale,
		Duration:         mp4.VideoTotalDuration,
	}
	init.Moov.AddChild(mvhd)
	// Set the total duration in the moov box
	mvex := mp4ff.NewMvexBox()
	mvex.AddChild(&mp4ff.MehdBox{FragmentDuration: int64(mp4.VideoTotalDuration)})
	init.Moov.AddChild(mvex)
	// Add a track for the video
	if mp4.VideoTrackName == "H264" || mp4.VideoTrackName == "AVC1" {
		init.AddEmptyTrack(videoTimescale, "video", "und")
		includePS := true
		err := init.Moov.Traks[0].SetAVCDescriptor("avc1", mp4.SPSNALUs, mp4.PPSNALUs, includePS)
		if err != nil {
			// NOTE(review): descriptor errors are intentionally ignored — the
			// file is still written; confirm this best-effort behavior.
			//panic(err)
		}
		init.Moov.Traks[0].Tkhd.Duration = mp4.VideoTotalDuration
		init.Moov.Traks[0].Mdia.Hdlr.Name = "agent " + utils.VERSION
		//init.Moov.Traks[0].Mdia.Mdhd.Duration = mp4.VideoTotalDuration
	} else if mp4.VideoTrackName == "H265" || mp4.VideoTrackName == "HVC1" {
		init.AddEmptyTrack(videoTimescale, "video", "und")
		includePS := true
		err := init.Moov.Traks[0].SetHEVCDescriptor("hvc1", mp4.VPSNALUs, mp4.SPSNALUs, mp4.PPSNALUs, [][]byte{}, includePS)
		if err != nil {
			//panic(err)
		}
		init.Moov.Traks[0].Tkhd.Duration = mp4.VideoTotalDuration
		init.Moov.Traks[0].Mdia.Hdlr.Name = "agent " + utils.VERSION
		//init.Moov.Traks[0].Mdia.Mdhd.Duration = mp4.VideoTotalDuration
	}
	// Try adding audio track if available
	if mp4.AudioTrackName == "AAC" || mp4.AudioTrackName == "MP4A" {
		// Add an audio track to the moov box
		init.AddEmptyTrack(audioTimescale, "audio", "und")
		// Check if the same sample rate is set, otherwise we default to 48000
		audioSampleRate := 48000
		if config.Capture.IPCamera.SampleRate > 0 {
			audioSampleRate = config.Capture.IPCamera.SampleRate
		}
		// Set the audio descriptor
		err := init.Moov.Traks[1].SetAACDescriptor(29, audioSampleRate)
		if err != nil {
			//panic(err)
		}
		init.Moov.Traks[1].Tkhd.Duration = mp4.AudioTotalDuration
		init.Moov.Traks[1].Mdia.Hdlr.Name = "agent " + utils.VERSION
		//init.Moov.Traks[1].Mdia.Mdhd.Duration = mp4.AudioTotalDuration
	}
	// Try adding subtitle track if available
	if mp4.VideoTrackName == "VTT" || mp4.VideoTrackName == "WebVTT" {
		// Add a subtitle track to the moov box
		init.AddEmptyTrack(videoTimescale, "subtitle", "und")
		// Set the subtitle descriptor
		err := init.Moov.Traks[2].SetWvttDescriptor("")
		if err != nil {
			//log.Log.Error("mp4.Close(): error setting VTT descriptor: " + err.Error())
			//return
		}
		init.Moov.Traks[2].Mdia.Hdlr.Name = "agent " + utils.VERSION
	}
	// We will create a fingerprint that is signed with the private key, so we can verify the integrity of the file later.
	// The fingerprint will be stored in a UUID box, which is a custom box that we can use to carry the signature.
	// The following fields are included in the fingerprint (UUID):
	// - Moov.Mvhd.CreationTime (the time the file was created)
	// - Moov.Mvhd.Duration (the total duration of the video)
	// - Moov.Trak.Hdlr.Name // (the name of the handler, which is the agent and version)
	// - len(Moof) // (the number of moof boxes in the file)
	// - size(Moof1) // (the size of the first moof box)
	// - size(Moof2) // (the size of the second moof box)
	// ..
	//
	// All attributes of the fingerprint are concatenated into a single string, which is then hashed using SHA-256
	// and signed with the private key.
	fingerprint := fmt.Sprintf("%d", init.Moov.Mvhd.CreationTime) + "_" +
		fmt.Sprintf("%d", init.Moov.Mvhd.Duration) + "_" +
		init.Moov.Trak.Mdia.Hdlr.Name + "_" +
		fmt.Sprintf("%d", mp4.MoofBoxes) + "_" // Number of moof boxes
	for i, size := range mp4.MoofBoxSizes {
		fingerprint += fmt.Sprintf("%d", size)
		if i < len(mp4.MoofBoxSizes)-1 {
			fingerprint += "_"
		}
	}
	// Remove trailing underscore if present
	if len(fingerprint) > 0 && fingerprint[len(fingerprint)-1] == '_' {
		fingerprint = fingerprint[:len(fingerprint)-1]
	}
	// Load the private key from the configuration
	privateKey := config.Signing.PrivateKey
	r := strings.NewReader(privateKey)
	pemBytes, _ := ioutil.ReadAll(r)
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		// No (valid) PEM key configured: skip signing, file is still finalized.
		//log.Log.Error("mp4.Close(): error decoding PEM block containing private key")
		//return
	} else {
		// Parse private key
		b := block.Bytes
		key, err := x509.ParsePKCS8PrivateKey(b)
		if err != nil {
			//log.Log.Error("mp4.Close(): error parsing private key: " + err.Error())
			//return
		} else {
			// Convert key to *rsa.PrivateKey
			rsaKey, _ := key.(*rsa.PrivateKey)
			fingerprintBytes := []byte(fingerprint)
			signature, err := encryption.SignWithPrivateKey(fingerprintBytes, rsaKey)
			if err == nil && len(signature) > 0 {
				uuid := &mp4ff.UUIDBox{}
				uuid.SetUUID("6b0c1f8e-3d2a-4f5b-9c7d-8f1e2b3c4d5e")
				uuid.UnknownPayload = signature
				init.Moov.AddChild(uuid)
			} else {
				//log.Log.Error("mp4.Close(): error signing fingerprint: " + err.Error())
			}
		}
	}
	// We could also calculate the SIDX box, which is a segment index box that contains information about the segments in the file.
	// This is useful for seeking in the file, and for streaming the file.
	/*sidx := &mp4ff.SidxBox{
		Version:                  0,
		Flags:                    0,
		ReferenceID:              0,
		Timescale:                videoTimescale,
		EarliestPresentationTime: 0,
		FirstOffset:              0,
		SidxRefs:                 make([]mp4ff.SidxRef, 0),
	}
	referenceTrak := init.Moov.Trak
	trex, ok := init.Moov.Mvex.GetTrex(referenceTrak.Tkhd.TrackID)
	if !ok {
		// We have an issue.
	}
	segDatas, err := findSegmentData(mp4.Segments, referenceTrak, trex)
	if err != nil {
		// We have an issue.
	}
	fillSidx(sidx, referenceTrak, segDatas, true)
	// Add the SIDX box to the moov box
	init.AddChild(sidx)*/
	// Encode the init segment into an in-memory buffer so it can be written
	// over the reserved free box at the start of the file.
	buffer := bytes.NewBuffer(make([]byte, 0))
	// NOTE(review): the Encode error is ignored here — confirm that is acceptable.
	init.Encode(buffer)
	// The start of the file holds a free box of mp4.FreeBoxSize bytes; the real
	// init segment replaces it, and any leftover space becomes a new free box.
	// NOTE(review): if buffer.Len() exceeds mp4.FreeBoxSize this would overwrite
	// media data — there is no guard for that case.
	// Write the init segment at the beginning of the file, replacing the free box
	if _, err := mp4.FileWriter.WriteAt(buffer.Bytes(), 0); err != nil {
		panic(err)
	}
	// Calculate the remaining size for the free box
	remainingSize := mp4.FreeBoxSize - int64(buffer.Len())
	if remainingSize > 0 {
		newFreeBox := mp4ff.NewFreeBox(make([]byte, remainingSize))
		var freeBuf bytes.Buffer
		if err := newFreeBox.Encode(&freeBuf); err != nil {
			panic(err)
		}
		if _, err := mp4.FileWriter.WriteAt(freeBuf.Bytes(), int64(buffer.Len())); err != nil {
			panic(err)
		}
	}
}
// segData captures the per-segment values needed to build a sidx
// (segment index) box: position, timing and size of one media segment.
type segData struct {
	startPos         uint64 // byte offset of the segment within the file
	presentationTime uint64 // baseDecodeTime plus the first sample's composition offset
	baseDecodeTime   uint64 // tfdt base media decode time of the first fragment
	dur              uint32 // sum of all sample durations in the segment (track timescale units)
	size             uint32 // total size of the segment in bytes
}
// fillSidx populates sidx from the per-segment data in segDatas, using
// refTrak's media timescale. Every reference is marked as starting with a
// SAP of type 1 (sync sample). When nonZeroEPT is set, the earliest
// presentation time is taken from the first segment; otherwise it is 0.
func fillSidx(sidx *mp4ff.SidxBox, refTrak *mp4ff.TrakBox, segDatas []segData, nonZeroEPT bool) {
	var earliest uint64
	if nonZeroEPT {
		earliest = segDatas[0].presentationTime
	}
	sidx.Version = 1
	sidx.Timescale = refTrak.Mdia.Mdhd.Timescale
	sidx.ReferenceID = 1
	sidx.EarliestPresentationTime = earliest
	sidx.FirstOffset = 0
	refs := make([]mp4ff.SidxRef, 0, len(segDatas))
	for _, sd := range segDatas {
		refs = append(refs, mp4ff.SidxRef{
			ReferencedSize:     sd.size,
			SubSegmentDuration: sd.dur,
			StartsWithSAP:      1,
			SAPType:            1,
		})
	}
	sidx.SidxRefs = refs
}
// findSegmentData returns a slice of segment media data using a reference track.
// For each segment it records the start position, the base decode time of the
// first fragment, the presentation time (decode time adjusted by the first
// sample's composition offset) and the accumulated sample duration.
func findSegmentData(segs []*mp4ff.MediaSegment, refTrak *mp4ff.TrakBox, trex *mp4ff.TrexBox) ([]segData, error) {
	out := make([]segData, 0, len(segs))
	for _, seg := range segs {
		var firstCTO int64 // composition time offset of the very first sample
		var totalDur uint32
		var baseTime uint64
		for fragIdx, frag := range seg.Fragments {
			for _, traf := range frag.Moof.Trafs {
				tfhd := traf.Tfhd
				if tfhd.TrackID != refTrak.Tkhd.TrackID {
					continue // only the reference track contributes sidx time values
				}
				if fragIdx == 0 {
					baseTime = traf.Tfdt.BaseMediaDecodeTime()
				}
				for trunIdx, trun := range traf.Truns {
					trun.AddSampleDefaultValues(tfhd, trex)
					for sampleIdx, sample := range trun.GetSamples() {
						if fragIdx == 0 && trunIdx == 0 && sampleIdx == 0 {
							firstCTO = int64(sample.CompositionTimeOffset)
						}
						totalDur += sample.Dur
					}
				}
			}
		}
		out = append(out, segData{
			startPos:         seg.StartPos,
			presentationTime: uint64(int64(baseTime) + firstCTO),
			baseDecodeTime:   baseTime,
			dur:              totalDur,
			size:             uint32(seg.Size()),
		})
	}
	return out, nil
}
// annexBToLengthPrefixed converts Annex B formatted H264 data (with start
// codes) into length-prefixed NAL units: each NAL unit is preceded by its
// 4-byte big-endian length. Empty NAL units (bare start codes) are dropped.
// Returns an error when no NAL units are found at all.
func annexBToLengthPrefixed(data []byte) ([]byte, error) {
	nalus := splitNALUs(data)
	if len(nalus) == 0 {
		return nil, fmt.Errorf("no NAL units found")
	}
	var out bytes.Buffer
	for _, raw := range nalus {
		// Strip the Annex B start code (0x000001 or 0x00000001), if any.
		nalu := removeAnnexBStartCode(raw)
		n := len(nalu)
		if n == 0 {
			continue
		}
		// 4-byte big-endian length prefix, then the payload.
		out.Write([]byte{byte(n >> 24), byte(n >> 16), byte(n >> 8), byte(n)})
		out.Write(nalu)
	}
	return out.Bytes(), nil
}
// removeAnnexBStartCode removes a leading Annex B start code from a NALU if present.
func removeAnnexBStartCode(nalu []byte) []byte {
if len(nalu) >= 4 && nalu[0] == 0x00 && nalu[1] == 0x00 {
if nalu[2] == 0x01 {
return nalu[3:]
}
if nalu[2] == 0x00 && nalu[3] == 0x01 {
return nalu[4:]
}
}
return nalu
}
// splitNALUs splits Annex B data into NAL units. Each returned slice still
// carries its leading start code; callers strip it separately (see
// removeAnnexBStartCode).
// NOTE(review): assumes data begins with a start code — the first chunk is
// emitted verbatim from offset 0, so any garbage prefix ends up in it.
func splitNALUs(data []byte) [][]byte {
	var nalus [][]byte
	for begin := 0; begin < len(data); {
		// Skip past the current start code before searching for the next one.
		next := findStartCode(data, begin+3)
		if next < 0 {
			// No further start code: the last NAL unit runs to the end.
			nalus = append(nalus, data[begin:])
			break
		}
		nalus = append(nalus, data[begin:next])
		begin = next
	}
	return nalus
}
// findStartCode returns the index of the next Annex B start code
// (0x000001 or 0x00000001) at or after pos, or -1 if none is present.
// A 4-byte start code is reported at the index of its first zero byte.
//
// Fix: the previous loop bound (i+3 < len(data)) required four bytes for
// every candidate, so a 3-byte start code ending exactly at the end of
// the buffer was never found. The short form only needs three bytes; the
// long form re-checks its own extra byte below.
func findStartCode(data []byte, pos int) int {
	if pos < 0 {
		pos = 0 // guard against a negative search origin
	}
	for i := pos; i+2 < len(data); i++ {
		if data[i] != 0x00 || data[i+1] != 0x00 {
			continue
		}
		if data[i+2] == 0x01 {
			return i
		}
		if i+3 < len(data) && data[i+2] == 0x00 && data[i+3] == 0x01 {
			return i
		}
	}
	return -1
}
// FindSyncword returns the index of the first ADTS syncword (twelve set
// bits: 0xFF followed by a byte whose high nibble is 0xF) in aac at or
// after offset, or -1 when none is present.
func FindSyncword(aac []byte, offset int) int {
	for i := offset; i+1 < len(aac); i++ {
		if aac[i] == 0xFF && aac[i+1]&0xF0 == 0xF0 {
			return i
		}
	}
	return -1
}
// Table 31 Profiles
// index profile
// 0 Main profile
// 1 Low Complexity profile (LC)
// 2 Scalable Sampling Rate profile (SSR)
// 3 (reserved)
// START_CODE_TYPE is the byte length of an Annex B start code:
// 3 for 0x000001, 4 for 0x00000001.
type START_CODE_TYPE int

const (
	START_CODE_3 START_CODE_TYPE = 3 // short start code 0x000001
	START_CODE_4 START_CODE_TYPE = 4 // long start code 0x00000001
)

// FindStartCode locates the next Annex B start code in nalu at or after
// offset, returning its index and type. When no start code exists it
// returns (-1, START_CODE_3).
func FindStartCode(nalu []byte, offset int) (int, START_CODE_TYPE) {
	rel := bytes.Index(nalu[offset:], []byte{0x00, 0x00, 0x01})
	if rel < 0 {
		return -1, START_CODE_3
	}
	abs := offset + rel
	// A zero byte immediately before 0x000001 makes it the 4-byte form.
	if rel > 0 && nalu[abs-1] == 0x00 {
		return abs - 1, START_CODE_4
	}
	return abs, START_CODE_3
}
// SplitFrame walks Annex B data and invokes onFrame with each NAL unit
// payload (start code stripped). Iteration stops early when onFrame
// returns false. A nil onFrame is tolerated: the data is still scanned.
func SplitFrame(frames []byte, onFrame func(nalu []byte) bool) {
	begin, codeLen := FindStartCode(frames, 0)
	for begin >= 0 {
		payloadStart := begin + int(codeLen)
		next, nextCodeLen := FindStartCode(frames, payloadStart)
		if next < 0 {
			// Last NAL unit runs to the end of the buffer.
			if onFrame != nil {
				onFrame(frames[payloadStart:])
			}
			return
		}
		if onFrame != nil && !onFrame(frames[payloadStart:next]) {
			return
		}
		begin, codeLen = next, nextCodeLen
	}
}
// SplitFrameWithStartCode behaves like SplitFrame but hands each NAL unit
// to onFrame with its leading start code still attached. NAL units with an
// empty payload are skipped. Iteration stops early when onFrame returns
// false.
func SplitFrameWithStartCode(frames []byte, onFrame func(nalu []byte) bool) {
	begin, codeLen := FindStartCode(frames, 0)
	for begin >= 0 {
		payloadStart := begin + int(codeLen)
		next, nextCodeLen := FindStartCode(frames, payloadStart)
		if next < 0 {
			// Last NAL unit: emit only when a payload byte follows the start code.
			if onFrame != nil && payloadStart < len(frames) {
				onFrame(frames[begin:])
			}
			return
		}
		if onFrame != nil && payloadStart < next && !onFrame(frames[begin:next]) {
			return
		}
		begin, codeLen = next, nextCodeLen
	}
}
// SplitAACFrame scans frames for ADTS syncwords and invokes onFrame with
// each complete ADTS frame (header included). started is false for the
// first frame found and true for every subsequent one.
// NOTE(review): a decoded Frame_length of 0 would loop forever on the same
// offset, and a truncated final frame can panic on the slice bound —
// confirm callers only pass whole ADTS streams.
func SplitAACFrame(frames []byte, onFrame func(started bool, aac []byte)) {
	var adts ADTS_Frame_Header
	start := FindSyncword(frames, 0)
	started := false
	for start >= 0 {
		// Decode the 7-byte header to learn this frame's total length.
		adts.Decode(frames[start:])
		onFrame(started, frames[start:start+int(adts.Variable_Header.Frame_length)])
		// Resume the syncword search at the end of the current frame.
		start = FindSyncword(frames, start+int(adts.Variable_Header.Frame_length))
		started = true
	}
}
// AAC_PROFILE enumerates the AAC object profiles carried in the 2-bit
// ADTS "profile" field (see Table 31 above).
type AAC_PROFILE int

const (
	MAIN AAC_PROFILE = iota // Main profile
	LC                      // Low Complexity profile
	SSR                     // Scalable Sampling Rate profile
)

// AAC_SAMPLING_FREQUENCY enumerates the 4-bit ADTS
// sampling_frequency_index values; the rate in Hz for each index is
// looked up in AAC_Sampling_Idx.
type AAC_SAMPLING_FREQUENCY int

const (
	AAC_SAMPLE_96000 AAC_SAMPLING_FREQUENCY = iota
	AAC_SAMPLE_88200
	AAC_SAMPLE_64000
	AAC_SAMPLE_48000
	AAC_SAMPLE_44100
	AAC_SAMPLE_32000
	AAC_SAMPLE_24000
	AAC_SAMPLE_22050
	AAC_SAMPLE_16000
	AAC_SAMPLE_12000
	AAC_SAMPLE_11025
	AAC_SAMPLE_8000
	AAC_SAMPLE_7350
)

// AAC_Sampling_Idx maps a sampling_frequency_index to its rate in Hz.
var AAC_Sampling_Idx [13]int = [13]int{96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350}
// Table 4 Syntax of adts_sequence()
// adts_sequence() {
// while (nextbits() == syncword) {
// adts_frame();
// }
// }
// Table 5 Syntax of adts_frame()
// adts_frame() {
// adts_fixed_header();
// adts_variable_header();
// if (number_of_raw_data_blocks_in_frame == 0) {
// adts_error_check();
// raw_data_block();
// }
// else {
// adts_header_error_check();
// for (i = 0; i <= number_of_raw_data_blocks_in_frame;i++ {
// raw_data_block();
// adts_raw_data_block_error_check();
// }
// }
// }
// adts_fixed_header()
// {
// syncword; 12 bslbf
// ID; 1 bslbf
// layer; 2 uimsbf
// protection_absent; 1 bslbf
// profile; 2 uimsbf
// sampling_frequency_index; 4 uimsbf
// private_bit; 1 bslbf
// channel_configuration; 3 uimsbf
// original/copy; 1 bslbf
// home; 1 bslbf
// }
// ADTS_Fix_Header mirrors adts_fixed_header() (bit widths per the layout
// comment above); these fields are constant for every frame of a stream.
type ADTS_Fix_Header struct {
	ID                       uint8 // 1 bit
	Layer                    uint8 // 2 bits
	Protection_absent        uint8 // 1 bit; 1 = header carries no CRC words
	Profile                  uint8 // 2 bits; see AAC_PROFILE
	Sampling_frequency_index uint8 // 4 bits; index into AAC_Sampling_Idx
	Private_bit              uint8 // 1 bit
	Channel_configuration    uint8 // 3 bits
	Originalorcopy           uint8 // 1 bit (original/copy)
	Home                     uint8 // 1 bit
}
// adts_variable_header() {
// copyright_identification_bit; 1 bslbf
// copyright_identification_start; 1 bslbf
// frame_length; 13 bslbf
// adts_buffer_fullness; 11 bslbf
// number_of_raw_data_blocks_in_frame; 2 uimsfb
// }
// ADTS_Variable_Header mirrors adts_variable_header() (bit widths per the
// layout comment above); these fields may change frame to frame.
type ADTS_Variable_Header struct {
	Copyright_identification_bit       uint8  // 1 bit
	copyright_identification_start     uint8  // 1 bit
	Frame_length                       uint16 // 13 bits
	Adts_buffer_fullness               uint16 // 11 bits
	Number_of_raw_data_blocks_in_frame uint8  // 2 bits
}
// ADTS_Frame_Header is a complete ADTS frame header: the fixed part shared
// by every frame plus the per-frame variable part.
type ADTS_Frame_Header struct {
	Fix_Header      ADTS_Fix_Header
	Variable_Header ADTS_Variable_Header
}
// NewAdtsFrameHeader returns a header preset for a CRC-less
// (Protection_absent = 1) MAIN-profile frame at 44.1 kHz; every other
// field starts at its zero value.
func NewAdtsFrameHeader() *ADTS_Frame_Header {
	hdr := &ADTS_Frame_Header{}
	hdr.Fix_Header.Protection_absent = 1
	hdr.Fix_Header.Profile = uint8(MAIN)
	hdr.Fix_Header.Sampling_frequency_index = uint8(AAC_SAMPLE_44100)
	return hdr
}
// Decode parses the seven ADTS header bytes at the start of aac into
// frame, following the adts_fixed_header()/adts_variable_header() bit
// layout documented above. Panics when len(aac) < 7.
func (frame *ADTS_Frame_Header) Decode(aac []byte) {
	_ = aac[6] // bounds-check hint: a full header needs at least 7 bytes
	// Fix: ID is a 1-bit field ("ID; 1 bslbf" in the layout above); the
	// previous code omitted the &0x01 mask, so syncword bits from the top
	// of aac[1] leaked into the decoded value.
	frame.Fix_Header.ID = aac[1] >> 3 & 0x01
	frame.Fix_Header.Layer = aac[1] >> 1 & 0x03
	frame.Fix_Header.Protection_absent = aac[1] & 0x01
	frame.Fix_Header.Profile = aac[2] >> 6 & 0x03
	frame.Fix_Header.Sampling_frequency_index = aac[2] >> 2 & 0x0F
	frame.Fix_Header.Private_bit = aac[2] >> 1 & 0x01
	// channel_configuration (3 bits) straddles bytes 2 and 3.
	frame.Fix_Header.Channel_configuration = (aac[2] & 0x01 << 2) | (aac[3] >> 6)
	frame.Fix_Header.Originalorcopy = aac[3] >> 5 & 0x01
	frame.Fix_Header.Home = aac[3] >> 4 & 0x01
	frame.Variable_Header.Copyright_identification_bit = aac[3] >> 3 & 0x01
	frame.Variable_Header.copyright_identification_start = aac[3] >> 2 & 0x01
	// frame_length is 13 bits spread over bytes 3..5.
	frame.Variable_Header.Frame_length = (uint16(aac[3]&0x03) << 11) | (uint16(aac[4]) << 3) | (uint16(aac[5]>>5) & 0x07)
	// adts_buffer_fullness is 11 bits spread over bytes 5..6.
	frame.Variable_Header.Adts_buffer_fullness = (uint16(aac[5]&0x1F) << 6) | uint16(aac[6]>>2)
	frame.Variable_Header.Number_of_raw_data_blocks_in_frame = aac[6] & 0x03
}
// Encode serializes the header into 7 bytes (9 when Protection_absent == 0;
// the two CRC bytes are left zeroed for the caller to fill in).
//
// Fixes over the previous version, each verified against Decode so that
// Decode on the encoded bytes round-trips:
//   - copyright_identification_bit belongs in bit 3 of byte 3 and
//     copyright_identification_start in bit 2 (they were swapped);
//   - the top 2 bits of the 13-bit frame_length are frame_length>>11
//     (the old byte(frame_length<<11) is always 0, dropping those bits);
//   - the top 5 bits of the 11-bit adts_buffer_fullness are fullness>>6,
//     not fullness>>3 (which smeared the wrong bits into byte 5).
func (frame *ADTS_Frame_Header) Encode() []byte {
	var hdr []byte
	if frame.Fix_Header.Protection_absent == 1 {
		hdr = make([]byte, 7)
	} else {
		hdr = make([]byte, 9)
	}
	hdr[0] = 0xFF
	hdr[1] = 0xF0 // low 4 syncword bits; field bits OR'd in below
	hdr[1] = hdr[1] | (frame.Fix_Header.ID << 3) | (frame.Fix_Header.Layer << 1) | frame.Fix_Header.Protection_absent
	hdr[2] = frame.Fix_Header.Profile<<6 | frame.Fix_Header.Sampling_frequency_index<<2 | frame.Fix_Header.Private_bit<<1 | frame.Fix_Header.Channel_configuration>>2
	hdr[3] = frame.Fix_Header.Channel_configuration<<6 | frame.Fix_Header.Originalorcopy<<5 | frame.Fix_Header.Home<<4
	hdr[3] = hdr[3] | frame.Variable_Header.Copyright_identification_bit<<3 | frame.Variable_Header.copyright_identification_start<<2 | byte(frame.Variable_Header.Frame_length>>11&0x03)
	hdr[4] = byte(frame.Variable_Header.Frame_length >> 3)
	hdr[5] = byte((frame.Variable_Header.Frame_length&0x07)<<5) | byte(frame.Variable_Header.Adts_buffer_fullness>>6&0x1F)
	hdr[6] = byte(frame.Variable_Header.Adts_buffer_fullness&0x3F<<2) | frame.Variable_Header.Number_of_raw_data_blocks_in_frame
	return hdr
}
// SampleToAACSampleIndex returns the sampling_frequency_index for the
// given sampling rate in Hz. Panics when the rate is not a standard AAC
// rate (see AAC_Sampling_Idx).
func SampleToAACSampleIndex(sampling int) int {
	for idx := 0; idx < len(AAC_Sampling_Idx); idx++ {
		if AAC_Sampling_Idx[idx] == sampling {
			return idx
		}
	}
	panic("not Found AAC Sample Index")
}
// AACSampleIdxToSample returns the sampling rate in Hz for a
// sampling_frequency_index; panics if idx is outside [0, 12].
func AACSampleIdxToSample(idx int) int {
	return AAC_Sampling_Idx[idx]
}
// +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// | audio object type(5 bits) | sampling frequency index(4 bits) | channel configuration(4 bits) | GA framelength flag(1 bits) | GA Depends on core coder(1 bits) | GA Extension Flag(1 bits) |
// +--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// AudioSpecificConfiguration mirrors the 2-byte AudioSpecificConfig
// bit layout documented above.
type AudioSpecificConfiguration struct {
	Audio_object_type        uint8 // 5 bits
	Sample_freq_index        uint8 // 4 bits
	Channel_configuration    uint8 // 4 bits
	GA_framelength_flag      uint8 // 1 bit
	GA_depends_on_core_coder uint8 // 1 bit
	GA_extension_flag        uint8 // 1 bit
}
// NewAudioSpecificConfiguration returns a configuration with every field
// at its zero value.
func NewAudioSpecificConfiguration() *AudioSpecificConfiguration {
	return &AudioSpecificConfiguration{}
}
// Encode packs the configuration into the 2-byte AudioSpecificConfig
// layout documented above: aot(5) sfi(4) chan(4) framelength(1)
// core-coder(1) extension(1).
func (asc *AudioSpecificConfiguration) Encode() []byte {
	buf := make([]byte, 2)
	// byte 0: all 5 object-type bits, then the top 3 bits of the frequency index.
	buf[0] = (asc.Audio_object_type & 0x1f << 3) | (asc.Sample_freq_index & 0x0F >> 1)
	// byte 1: low bit of the frequency index (the <<7 on a uint8 truncates
	// away the upper bits on purpose), channel config, then the GA flags.
	buf[1] = (asc.Sample_freq_index & 0x0F << 7) | (asc.Channel_configuration & 0x0F << 3) | (asc.GA_framelength_flag & 0x01 << 2) | (asc.GA_depends_on_core_coder & 0x01 << 1) | (asc.GA_extension_flag & 0x01)
	return buf
}
// Decode unpacks a 2-byte AudioSpecificConfig (layout documented above)
// into asc. Returns an error when buf is shorter than 2 bytes.
func (asc *AudioSpecificConfiguration) Decode(buf []byte) error {
	if len(buf) < 2 {
		return errors.New("len of buf < 2 ")
	}
	asc.Audio_object_type = buf[0] >> 3 // top 5 bits of byte 0
	// The 4-bit frequency index straddles the byte boundary: 3 bits + 1 bit.
	asc.Sample_freq_index = (buf[0] & 0x07 << 1) | (buf[1] >> 7)
	asc.Channel_configuration = buf[1] >> 3 & 0x0F
	asc.GA_framelength_flag = buf[1] >> 2 & 0x01
	asc.GA_depends_on_core_coder = buf[1] >> 1 & 0x01
	asc.GA_extension_flag = buf[1] & 0x01
	return nil
}
// ConvertADTSToASC builds an AudioSpecificConfiguration from the ADTS
// header at the start of frame. The 2-bit ADTS profile maps to the ASC
// audio object type as profile+1. Returns an error when frame is shorter
// than the 7-byte ADTS header.
func ConvertADTSToASC(frame []byte) (*AudioSpecificConfiguration, error) {
	if len(frame) < 7 {
		return nil, errors.New("len of frame < 7")
	}
	adts := NewAdtsFrameHeader()
	adts.Decode(frame)
	asc := &AudioSpecificConfiguration{
		Audio_object_type:     adts.Fix_Header.Profile + 1,
		Channel_configuration: adts.Fix_Header.Channel_configuration,
		Sample_freq_index:     adts.Fix_Header.Sampling_frequency_index,
	}
	return asc, nil
}
// ConvertASCToADTS builds an ADTS header from a raw AudioSpecificConfig.
// The ASC audio object type maps back to the 2-bit ADTS profile as aot-1;
// aacbytes is stored verbatim in Frame_length.
// NOTE(review): the ADTS frame_length field conventionally includes the
// 7-byte header — confirm callers pass the full frame size rather than
// just the payload size.
func ConvertASCToADTS(asc []byte, aacbytes int) (*ADTS_Frame_Header, error) {
	cfg := NewAudioSpecificConfiguration()
	if err := cfg.Decode(asc); err != nil {
		return nil, err
	}
	adts := NewAdtsFrameHeader()
	adts.Fix_Header.Profile = cfg.Audio_object_type - 1
	adts.Fix_Header.Channel_configuration = cfg.Channel_configuration
	adts.Fix_Header.Sampling_frequency_index = cfg.Sample_freq_index
	adts.Fix_Header.Protection_absent = 1
	adts.Variable_Header.Adts_buffer_fullness = 0x3F
	adts.Variable_Header.Frame_length = uint16(aacbytes)
	return adts, nil
}

View File

@@ -16,6 +16,8 @@ import (
"github.com/kerberos-io/agent/machinery/src/packets"
mqtt "github.com/eclipse/paho.mqtt.golang"
"github.com/pion/interceptor"
"github.com/pion/interceptor/pkg/intervalpli"
pionWebRTC "github.com/pion/webrtc/v4"
pionMedia "github.com/pion/webrtc/v4/pkg/media"
)
@@ -70,7 +72,7 @@ func RegisterCandidates(key string, candidate models.ReceiveHDCandidatesPayload)
CandidatesMutex.Lock()
_, ok := CandidateArrays[key]
if !ok {
CandidateArrays[key] = make(chan string)
CandidateArrays[key] = make(chan string, 100)
}
log.Log.Info("webrtc.main.HandleReceiveHDCandidates(): " + candidate.Candidate)
select {
@@ -81,6 +83,19 @@ func RegisterCandidates(key string, candidate models.ReceiveHDCandidatesPayload)
CandidatesMutex.Unlock()
}
func RegisterDefaultInterceptors(mediaEngine *pionWebRTC.MediaEngine, interceptorRegistry *interceptor.Registry) error {
if err := pionWebRTC.ConfigureNack(mediaEngine, interceptorRegistry); err != nil {
return err
}
if err := pionWebRTC.ConfigureRTCPReports(interceptorRegistry); err != nil {
return err
}
if err := pionWebRTC.ConfigureSimulcastExtensionHeaders(mediaEngine); err != nil {
return err
}
return nil
}
func InitializeWebRTCConnection(configuration *models.Configuration, communication *models.Communication, mqttClient mqtt.Client, videoTrack *pionWebRTC.TrackLocalStaticSample, audioTrack *pionWebRTC.TrackLocalStaticSample, handshake models.RequestHDStreamPayload) {
config := configuration.Config
@@ -95,7 +110,7 @@ func InitializeWebRTCConnection(configuration *models.Configuration, communicati
CandidatesMutex.Lock()
_, ok := CandidateArrays[sessionKey]
if !ok {
CandidateArrays[sessionKey] = make(chan string)
CandidateArrays[sessionKey] = make(chan string, 100)
}
CandidatesMutex.Unlock()
@@ -114,7 +129,31 @@ func InitializeWebRTCConnection(configuration *models.Configuration, communicati
log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong registering codecs for media engine: " + err.Error())
}
api := pionWebRTC.NewAPI(pionWebRTC.WithMediaEngine(mediaEngine))
// Create a InterceptorRegistry. This is the user configurable RTP/RTCP Pipeline.
// This provides NACKs, RTCP Reports and other features. If you use `webrtc.NewPeerConnection`
// this is enabled by default. If you are manually managing You MUST create a InterceptorRegistry
// for each PeerConnection.
interceptorRegistry := &interceptor.Registry{}
// Use the default set of Interceptors
if err := pionWebRTC.RegisterDefaultInterceptors(mediaEngine, interceptorRegistry); err != nil {
panic(err)
}
// Register a intervalpli factory
// This interceptor sends a PLI every 3 seconds. A PLI causes a video keyframe to be generated by the sender.
// This makes our video seekable and more error resilent, but at a cost of lower picture quality and higher bitrates
// A real world application should process incoming RTCP packets from viewers and forward them to senders
intervalPliFactory, err := intervalpli.NewReceiverInterceptor()
if err != nil {
panic(err)
}
interceptorRegistry.Add(intervalPliFactory)
api := pionWebRTC.NewAPI(
pionWebRTC.WithMediaEngine(mediaEngine),
pionWebRTC.WithInterceptorRegistry(interceptorRegistry),
)
policy := pionWebRTC.ICETransportPolicyAll
if config.ForceTurn == "true" {
@@ -139,13 +178,36 @@ func InitializeWebRTCConnection(configuration *models.Configuration, communicati
if err == nil && peerConnection != nil {
if _, err = peerConnection.AddTrack(videoTrack); err != nil {
var videoSender *pionWebRTC.RTPSender = nil
if videoSender, err = peerConnection.AddTrack(videoTrack); err != nil {
log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong while adding video track: " + err.Error())
}
// Read incoming RTCP packets
// Before these packets are returned they are processed by interceptors. For things
// like NACK this needs to be called.
go func() {
rtcpBuf := make([]byte, 1500)
for {
if _, _, rtcpErr := videoSender.Read(rtcpBuf); rtcpErr != nil {
return
}
}
}()
if _, err = peerConnection.AddTrack(audioTrack); err != nil {
var audioSender *pionWebRTC.RTPSender = nil
if audioSender, err = peerConnection.AddTrack(audioTrack); err != nil {
log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong while adding audio track: " + err.Error())
}
} // Read incoming RTCP packets
// Before these packets are returned they are processed by interceptors. For things
// like NACK this needs to be called.
go func() {
rtcpBuf := make([]byte, 1500)
for {
if _, _, rtcpErr := audioSender.Read(rtcpBuf); rtcpErr != nil {
return
}
}
}()
peerConnection.OnConnectionStateChange(func(connectionState pionWebRTC.PeerConnectionState) {
if connectionState == pionWebRTC.PeerConnectionStateDisconnected || connectionState == pionWebRTC.PeerConnectionStateClosed {
@@ -157,26 +219,23 @@ func InitializeWebRTCConnection(configuration *models.Configuration, communicati
close(CandidateArrays[sessionKey])
delete(CandidateArrays, sessionKey)
}
// Not really needed.
//senders := peerConnection.GetSenders()
//for _, sender := range senders {
// if err := peerConnection.RemoveTrack(sender); err != nil {
// log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong while removing track: " + err.Error())
// }
//}
if err := peerConnection.Close(); err != nil {
log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong while closing peer connection: " + err.Error())
}
peerConnections[handshake.SessionID] = nil
delete(peerConnections, handshake.SessionID)
CandidatesMutex.Unlock()
} else if connectionState == pionWebRTC.PeerConnectionStateConnected {
CandidatesMutex.Lock()
atomic.AddInt64(&peerConnectionCount, 1)
CandidatesMutex.Unlock()
} else if connectionState == pionWebRTC.PeerConnectionStateConnecting {
// Iterate over the candidates and send them to the remote client
// Non blocking channe
for candidate := range CandidateArrays[sessionKey] {
CandidatesMutex.Lock()
log.Log.Info("webrtc.main.InitializeWebRTCConnection(): Received candidate from channel: " + candidate)
if candidateErr := peerConnection.AddICECandidate(pionWebRTC.ICECandidateInit{Candidate: string(candidate)}); candidateErr != nil {
log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong while adding candidate: " + candidateErr.Error())
}
CandidatesMutex.Unlock()
}
} else if connectionState == pionWebRTC.PeerConnectionStateFailed {
log.Log.Info("webrtc.main.InitializeWebRTCConnection(): ICEConnectionStateFailed")
}
@@ -184,6 +243,19 @@ func InitializeWebRTCConnection(configuration *models.Configuration, communicati
log.Log.Info("webrtc.main.InitializeWebRTCConnection(): Number of peers connected (" + strconv.FormatInt(peerConnectionCount, 10) + ")")
})
go func() {
// Iterate over the candidates and send them to the remote client
// Non blocking channe
for candidate := range CandidateArrays[sessionKey] {
CandidatesMutex.Lock()
log.Log.Info(">>>> webrtc.main.InitializeWebRTCConnection(): Received candidate from channel: " + candidate)
if candidateErr := peerConnection.AddICECandidate(pionWebRTC.ICECandidateInit{Candidate: string(candidate)}); candidateErr != nil {
log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong while adding candidate: " + candidateErr.Error())
}
CandidatesMutex.Unlock()
}
}()
offer := w.CreateOffer(sd)
if err = peerConnection.SetRemoteDescription(offer); err != nil {
log.Log.Error("webrtc.main.InitializeWebRTCConnection(): something went wrong while setting remote description: " + err.Error())
@@ -322,8 +394,8 @@ func WriteToTrack(livestreamCursor *packets.QueueCursor, configuration *models.C
var cursorError error
var pkt packets.Packet
var previousTimeVideo int64
var previousTimeAudio int64
var lastAudioSample *pionMedia.Sample = nil
var lastVideoSample *pionMedia.Sample = nil
start := false
receivedKeyFrame := false
@@ -381,25 +453,27 @@ func WriteToTrack(livestreamCursor *packets.QueueCursor, configuration *models.C
if pkt.IsVideo {
// Calculate the difference
bufferDuration := pkt.Time - previousTimeVideo
previousTimeVideo = pkt.Time
// Start at the first keyframe
if pkt.IsKeyFrame {
start = true
}
if start {
bufferDurationCasted := time.Duration(bufferDuration) * time.Millisecond
sample := pionMedia.Sample{Data: pkt.Data, Duration: bufferDurationCasted, PacketTimestamp: uint32(pkt.Time)}
sample := pionMedia.Sample{Data: pkt.Data, PacketTimestamp: uint32(pkt.Time)}
//sample = pionMedia.Sample{Data: pkt.Data, Duration: time.Second}
if config.Capture.ForwardWebRTC == "true" {
// We will send the video to a remote peer
// TODO..
} else {
if err := videoTrack.WriteSample(sample); err != nil && err != io.ErrClosedPipe {
log.Log.Error("webrtc.main.WriteToTrack(): something went wrong while writing sample: " + err.Error())
if lastVideoSample != nil {
duration := sample.PacketTimestamp - lastVideoSample.PacketTimestamp
bufferDurationCasted := time.Duration(duration) * time.Millisecond
lastVideoSample.Duration = bufferDurationCasted
if err := videoTrack.WriteSample(*lastVideoSample); err != nil && err != io.ErrClosedPipe {
log.Log.Error("webrtc.main.WriteToTrack(): something went wrong while writing sample: " + err.Error())
}
}
lastVideoSample = &sample
}
}
} else if pkt.IsAudio {
@@ -409,31 +483,30 @@ func WriteToTrack(livestreamCursor *packets.QueueCursor, configuration *models.C
// If PCM_MULAW we can send it directly.
if hasAAC {
// We will transcode the audio
// We will transcode the audio from AAC to PCM_MULAW
// Not sure how to do this yet, but we need to use a decoder
// and then encode it to PCM_MULAW.
// TODO..
//d := fdkaac.NewAacDecoder()
continue
}
// Calculate the difference
bufferDuration := pkt.Time - previousTimeAudio
previousTimeAudio = pkt.Time
// We will send the audio
bufferDurationCasted := time.Duration(bufferDuration) * time.Millisecond
sample := pionMedia.Sample{Data: pkt.Data, Duration: bufferDurationCasted, PacketTimestamp: uint32(pkt.Time)}
//sample = pionMedia.Sample{Data: pkt.Data, Duration: time.Second}
if err := audioTrack.WriteSample(sample); err != nil && err != io.ErrClosedPipe {
log.Log.Error("webrtc.main.WriteToTrack(): something went wrong while writing sample: " + err.Error())
sample := pionMedia.Sample{Data: pkt.Data, PacketTimestamp: uint32(pkt.Time)}
if lastAudioSample != nil {
duration := sample.PacketTimestamp - lastAudioSample.PacketTimestamp
bufferDurationCasted := time.Duration(duration) * time.Millisecond
lastAudioSample.Duration = bufferDurationCasted
if err := audioTrack.WriteSample(*lastAudioSample); err != nil && err != io.ErrClosedPipe {
log.Log.Error("webrtc.main.WriteToTrack(): something went wrong while writing sample: " + err.Error())
}
}
lastAudioSample = &sample
}
}
}
for _, p := range peerConnections {
if p != nil {
p.Close()
}
}
peerConnectionCount = 0
log.Log.Info("webrtc.main.WriteToTrack(): stop writing to track.")

View File

@@ -9,7 +9,7 @@
},
"navigation": {
"profile": "Profile",
"admin": "admin",
"admin": "Admin",
"management": "Management",
"dashboard": "Dashboard",
"recordings": "Recordings",
@@ -32,11 +32,11 @@
"latest_events": "Latest events",
"configure_connection": "Configure connection",
"no_events": "No events",
"no_events_description": "No recordings where found, make sure your Kerberos Agent is properly configured.",
"no_events_description": "No recordings were found, make sure your Agent is properly configured.",
"motion_detected": "Motion was detected",
"live_view": "Live view",
"loading_live_view": "Loading live view",
"loading_live_view_description": "Hold on we are loading your live view here. If you didn't configure your camera connection, update it on the settings pages.",
"loading_live_view_description": "Hold on, we are loading your live view here. If you didn't configure your camera connection, update it on the settings pages.",
"time": "Time",
"description": "Description",
"name": "Name"
@@ -59,32 +59,32 @@
"persistence": "Persistence"
},
"info": {
"kerberos_hub_demo": "Have a look at our Kerberos Hub demo environment, to see Kerberos Hub in action!",
"configuration_updated_success": "Your configuration have been updated successfully.",
"kerberos_hub_demo": "Have a look at our Hub demo environment, to see Hub in action!",
"configuration_updated_success": "Your configuration has been updated successfully.",
"configuration_updated_error": "Something went wrong while saving.",
"verify_hub": "Verifying your Kerberos Hub settings.",
"verify_hub_success": "Kerberos Hub settings are successfully verified.",
"verify_hub_error": "Something went wrong while verifying Kerberos Hub",
"verify_hub": "Verifying your Hub settings.",
"verify_hub_success": "Hub settings are successfully verified.",
"verify_hub_error": "Something went wrong while verifying Hub.",
"verify_persistence": "Verifying your persistence settings.",
"verify_persistence_success": "Persistence settings are successfully verified.",
"verify_persistence_error": "Something went wrong while verifying the persistence",
"verify_persistence_error": "Something went wrong while verifying the persistence.",
"verify_camera": "Verifying your camera settings.",
"verify_camera_success": "Camera settings are successfully verified.",
"verify_camera_error": "Something went wrong while verifying the camera settings",
"verify_camera_error": "Something went wrong while verifying the camera settings.",
"verify_onvif": "Verifying your ONVIF settings.",
"verify_onvif_success": "ONVIF settings are successfully verified.",
"verify_onvif_error": "Something went wrong while verifying the ONVIF settings"
"verify_onvif_error": "Something went wrong while verifying the ONVIF settings."
},
"overview": {
"general": "General",
"description_general": "General settings for your Kerberos Agent",
"description_general": "General settings for your Agent",
"key": "Key",
"camera_name": "Camera name",
"camera_friendly_name": "Friendly name",
"timezone": "Timezone",
"select_timezone": "Select a timezone",
"advanced_configuration": "Advanced configuration",
"description_advanced_configuration": "Detailed configuration options to enable or disable specific parts of the Kerberos Agent",
"description_advanced_configuration": "Detailed configuration options to enable or disable specific parts of the Agent",
"offline_mode": "Offline mode",
"description_offline_mode": "Disable all outgoing traffic",
"encryption": "Encryption",
@@ -101,9 +101,9 @@
"camera": "Camera",
"description_camera": "Camera settings are required to make a connection to your camera of choice.",
"only_h264": "Currently only H264/H265 RTSP streams are supported.",
"rtsp_url": "RTSP url",
"rtsp_url": "RTSP URL",
"rtsp_h264": "A H264/H265 RTSP connection to your camera.",
"sub_rtsp_url": "Sub RTSP url (used for livestreaming)",
"sub_rtsp_url": "Sub RTSP URL (used for livestreaming)",
"sub_rtsp_h264": "A secondary RTSP connection to the low resolution of your camera.",
"onvif": "ONVIF",
"description_onvif": "Credentials to communicate with ONVIF capabilities. These are used for PTZ or other capabilities provided by the camera.",
@@ -115,28 +115,28 @@
},
"recording": {
"recording": "Recording",
"description_recording": "Specify how you would like to make recordings. Having a continuous 24/7 setup or a motion based recording.",
"description_recording": "Specify how you would like to make recordings. Having a continuous 24/7 setup or a motion-based recording.",
"continuous_recording": "Continuous recording",
"description_continuous_recording": "Make 24/7 or motion based recordings.",
"max_duration": "max video duration (seconds)",
"description_continuous_recording": "Make 24/7 or motion-based recordings.",
"max_duration": "Max video duration (seconds)",
"description_max_duration": "The maximum duration of a recording.",
"pre_recording": "pre recording (key frames buffered)",
"pre_recording": "Pre recording (key frames buffered)",
"description_pre_recording": "Seconds before an event occurred.",
"post_recording": "post recording (seconds)",
"post_recording": "Post recording (seconds)",
"description_post_recording": "Seconds after an event occurred.",
"threshold": "Recording threshold (pixels)",
"description_threshold": "The number of pixels changed to record",
"description_threshold": "The number of pixels changed to record.",
"autoclean": "Auto clean",
"description_autoclean": "Specify if the Kerberos Agent can cleanup recordings when a specific storage capacity (MB) is reached. This will remove the oldest recordings when the capacity is reached.",
"description_autoclean": "Specify if the Agent can clean up recordings when a specific storage capacity (MB) is reached. This will remove the oldest recordings when the capacity is reached.",
"autoclean_enable": "Enable auto clean",
"autoclean_description_enable": "Remove oldest recording when capacity reached.",
"autoclean_max_directory_size": "Maximum directory size (MB)",
"autoclean_description_max_directory_size": "The maximum MB's of recordings stored.",
"autoclean_description_max_directory_size": "The maximum MBs of recordings stored.",
"fragmentedrecordings": "Fragmented recordings",
"description_fragmentedrecordings": "When recordings are fragmented they are suitable for an HLS stream. When turned on the MP4 container will look a bit different.",
"description_fragmentedrecordings": "When recordings are fragmented they are suitable for an HLS stream. When turned on, the MP4 container will look a bit different.",
"fragmentedrecordings_enable": "Enable fragmentation",
"fragmentedrecordings_description_enable": "Fragmented recordings are required for HLS.",
"fragmentedrecordings_duration": "fragment duration",
"fragmentedrecordings_duration": "Fragment duration",
"fragmentedrecordings_description_duration": "Duration of a single fragment."
},
"streaming": {
@@ -149,16 +149,16 @@
"force_turn": "Force TURN",
"force_turn_description": "Force TURN usage, even when STUN is available.",
"stun_turn_forward": "Forwarding and transcoding",
"stun_turn_description_forward": "Optimisations and enhancements for TURN/STUN communication.",
"stun_turn_description_forward": "Optimizations and enhancements for TURN/STUN communication.",
"stun_turn_webrtc": "Forwarding to WebRTC broker",
"stun_turn_description_webrtc": "Forward h264 stream through MQTT",
"stun_turn_description_webrtc": "Forward H264 stream through MQTT",
"stun_turn_transcode": "Transcode stream",
"stun_turn_description_transcode": "Convert stream to a lower resolution",
"stun_turn_downscale": "Downscale resolution (in % of original resolution)",
"mqtt": "MQTT",
"description_mqtt": "A MQTT broker is used to communicate from",
"description2_mqtt": "to the Kerberos Agent, to achieve for example livestreaming or ONVIF (PTZ) capabilities.",
"mqtt_brokeruri": "Broker Uri",
"description_mqtt": "An MQTT broker is used to communicate from",
"description2_mqtt": "to the Agent, to achieve for example livestreaming or ONVIF (PTZ) capabilities.",
"mqtt_brokeruri": "Broker URI",
"mqtt_username": "Username",
"mqtt_password": "Password",
"realtimeprocessing": "Realtime Processing",
@@ -180,55 +180,61 @@
"friday": "Friday",
"saturday": "Saturday",
"externalcondition": "External Condition",
"description_externalcondition": "Depending on an external webservice recording can be enabled or disabled.",
"description_externalcondition": "Depending on an external web service, recording can be enabled or disabled.",
"regionofinterest": "Region Of Interest",
"description_regionofinterest": "By defining one or more regions, motion will be tracked only in the regions you have defined."
},
"persistence": {
"kerberoshub": "Kerberos Hub",
"description_kerberoshub": "Kerberos Agents can send heartbeats to a central",
"description2_kerberoshub": "installation. Heartbeats and other relevant information are synced to Kerberos Hub to show realtime information about your video landscape.",
"kerberoshub": "Hub",
"description_kerberoshub": "Agents can send heartbeats to a central",
"description2_kerberoshub": "installation. Heartbeats and other relevant information are synced to Hub to show realtime information about your video landscape.",
"persistence": "Persistence",
"saasoffering": "Kerberos Hub (SAAS offering)",
"secondary_persistence": "Secondary Persistence",
"description_secondary_persistence": "Recordings will be sent to secondary persistence if the primary persistence is unavailable or fails. This can be useful for failover purposes.",
"saasoffering": "Hub (SaaS offering)",
"description_persistence": "Having the ability to store your recordings is the beginning of everything. You can choose between our",
"description2_persistence": ", or a 3rd party provider",
"select_persistence": "Select a persistence",
"kerberoshub_encryption": "Encryption",
"kerberoshub_encryption_description": "All traffic from/to Kerberos Hub will encrypted using AES-256.",
"kerberoshub_proxyurl": "Kerberos Hub Proxy URL",
"kerberoshub_encryption_description": "All traffic from/to Hub will be encrypted using AES-256.",
"kerberoshub_proxyurl": "Hub Proxy URL",
"kerberoshub_description_proxyurl": "The Proxy endpoint for uploading your recordings.",
"kerberoshub_apiurl": "Kerberos Hub API URL",
"kerberoshub_apiurl": "Hub API URL",
"kerberoshub_description_apiurl": "The API endpoint for uploading your recordings.",
"kerberoshub_publickey": "Public key",
"kerberoshub_description_publickey": "The public key granted to your Kerberos Hub account.",
"kerberoshub_description_publickey": "The public key granted to your Hub account.",
"kerberoshub_privatekey": "Private key",
"kerberoshub_description_privatekey": "The private key granted to your Kerberos Hub account.",
"kerberoshub_description_privatekey": "The private key granted to your Hub account.",
"kerberoshub_site": "Site",
"kerberoshub_description_site": "The site ID the Kerberos Agents are belonging to in Kerberos Hub.",
"kerberoshub_description_site": "The site ID the Agents belong to in Hub.",
"kerberoshub_region": "Region",
"kerberoshub_description_region": "The region we are storing our recordings in.",
"kerberoshub_bucket": "Bucket",
"kerberoshub_description_bucket": "The bucket we are storing our recordings in.",
"kerberoshub_username": "Username/Directory (should match Kerberos Hub username)",
"kerberoshub_description_username": "The username of your Kerberos Hub account.",
"kerberosvault_apiurl": "Kerberos Vault API URL",
"kerberosvault_description_apiurl": "The Kerberos Vault API",
"kerberoshub_username": "Username/Directory (should match Hub username)",
"kerberoshub_description_username": "The username of your Hub account.",
"kerberosvault_apiurl": "Vault API URL",
"kerberosvault_description_apiurl": "The Vault API",
"kerberosvault_provider": "Provider",
"kerberosvault_description_provider": "The provider to which your recordings will be send.",
"kerberosvault_directory": "Directory (should match Kerberos Hub username)",
"kerberosvault_description_directory": "Sub directory the recordings will be stored in your provider.",
"kerberosvault_description_provider": "The provider to which your recordings will be sent.",
"kerberosvault_directory": "Directory (should match Hub username)",
"kerberosvault_description_directory": "Subdirectory the recordings will be stored in your provider.",
"kerberosvault_accesskey": "Access key",
"kerberosvault_description_accesskey": "The access key of your Kerberos Vault account.",
"kerberosvault_description_accesskey": "The access key of your Vault account.",
"kerberosvault_secretkey": "Secret key",
"kerberosvault_description_secretkey": "The secret key of your Kerberos Vault account.",
"kerberosvault_description_secretkey": "The secret key of your Vault account.",
"kerberosvault_maxretries": "Max retries",
"kerberosvault_description_maxretries": "The maximum number of retries to upload a recording.",
"kerberosvault_timeout": "Timeout",
"kerberosvault_description_timeout": "If a timeout occurs, recordings will be sent directly to the secondary Vault.",
"dropbox_directory": "Directory",
"dropbox_description_directory": "The sub directory where the recordings will be stored in your Dropbox account.",
"dropbox_description_directory": "The subdirectory where the recordings will be stored in your Dropbox account.",
"dropbox_accesstoken": "Access token",
"dropbox_description_accesstoken": "The access token of your Dropbox account/app.",
"verify_connection": "Verify Connection",
"remove_after_upload": "Once recordings are uploaded to some persistence, you might want to remove them from the local Kerberos Agent.",
"remove_after_upload": "Once recordings are uploaded to some persistence, you might want to remove them from the local Agent.",
"remove_after_upload_description": "Remove recordings after they are uploaded successfully.",
"remove_after_upload_enabled": "Enabled delete on upload"
"remove_after_upload_enabled": "Enable delete on upload"
}
}
}
}

View File

@@ -226,4 +226,4 @@
"remove_after_upload_enabled": "Abilita cancellazione al caricamento"
}
}
}
}

View File

@@ -226,4 +226,4 @@
"remove_after_upload_enabled": "Enabled delete on upload"
}
}
}
}

View File

@@ -0,0 +1,234 @@
{
"breadcrumb": {
"watch_recordings": "Xem bản ghi",
"configure": "Cấu hình"
},
"buttons": {
"save": "Lưu",
"verify_connection": "Xác minh kết nối"
},
"navigation": {
"profile": "Hồ sơ",
"admin": "Quản trị",
"management": "Quản lý",
"dashboard": "Bảng điều khiển",
"recordings": "Bản ghi",
"settings": "Cài đặt",
"help_support": "Trợ giúp & Hỗ trợ",
"swagger": "API Swagger",
"documentation": "Tài liệu",
"ui_library": "Thư viện UI",
"layout": "Ngôn ngữ & Bố cục",
"choose_language": "Chọn ngôn ngữ"
},
"dashboard": {
"title": "Bảng điều khiển",
"heading": "Tổng quan về giám sát video của bạn",
"number_of_days": "Số ngày",
"total_recordings": "Tổng số bản ghi",
"connected": "Đã kết nối",
"not_connected": "Chưa kết nối",
"offline_mode": "Chế độ ngoại tuyến",
"latest_events": "Sự kiện gần đây",
"configure_connection": "Cấu hình kết nối",
"no_events": "Không có sự kiện",
"no_events_description": "Không tìm thấy bản ghi nào, hãy đảm bảo Kerberos Agent của bạn được cấu hình đúng cách.",
"motion_detected": "Phát hiện chuyển động",
"live_view": "Xem trực tiếp",
"loading_live_view": "Đang tải xem trực tiếp",
"loading_live_view_description": "Vui lòng chờ trong khi chúng tôi tải xem trực tiếp của bạn. Nếu bạn chưa cấu hình kết nối camera, hãy cập nhật trong trang cài đặt.",
"time": "Thời gian",
"description": "Mô tả",
"name": "Tên"
},
"recordings": {
"title": "Bản ghi",
"heading": "Tất cả bản ghi của bạn ở một nơi",
"search_media": "Tìm kiếm phương tiện"
},
"settings": {
"title": "Cài đặt",
"heading": "Thiết lập camera của bạn",
"submenu": {
"all": "Tất cả",
"overview": "Tổng quan",
"camera": "Camera",
"recording": "Ghi hình",
"streaming": "Truyền phát",
"conditions": "Điều kiện",
"persistence": "Lưu trữ"
},
"info": {
"kerberos_hub_demo": "Xem thử môi trường demo của Kerberos Hub để thấy Kerberos Hub hoạt động như thế nào!",
"configuration_updated_success": "Cấu hình của bạn đã được cập nhật thành công.",
"configuration_updated_error": "Đã xảy ra lỗi khi lưu.",
"verify_hub": "Đang xác minh cài đặt Kerberos Hub của bạn.",
"verify_hub_success": "Cài đặt Kerberos Hub đã được xác minh thành công.",
"verify_hub_error": "Đã xảy ra lỗi khi xác minh Kerberos Hub",
"verify_persistence": "Đang xác minh cài đặt lưu trữ.",
"verify_persistence_success": "Cài đặt lưu trữ đã được xác minh thành công.",
"verify_persistence_error": "Đã xảy ra lỗi khi xác minh lưu trữ",
"verify_camera": "Đang xác minh cài đặt camera.",
"verify_camera_success": "Cài đặt camera đã được xác minh thành công.",
"verify_camera_error": "Đã xảy ra lỗi khi xác minh cài đặt camera",
"verify_onvif": "Đang xác minh cài đặt ONVIF.",
"verify_onvif_success": "Cài đặt ONVIF đã được xác minh thành công.",
"verify_onvif_error": "Đã xảy ra lỗi khi xác minh cài đặt ONVIF"
},
"overview": {
"general": "Chung",
"description_general": "Cài đặt chung cho Kerberos Agent của bạn",
"key": "Khóa",
"camera_name": "Tên camera",
"camera_friendly_name": "Tên thân thiện",
"timezone": "Múi giờ",
"select_timezone": "Chọn múi giờ",
"advanced_configuration": "Cấu hình nâng cao",
"description_advanced_configuration": "Tùy chọn cấu hình chi tiết để bật hoặc tắt các phần cụ thể của Kerberos Agent",
"offline_mode": "Chế độ ngoại tuyến",
"description_offline_mode": "Vô hiệu hóa toàn bộ lưu lượng đi",
"encryption": "Mã hóa",
"description_encryption": "Bật mã hóa cho toàn bộ lưu lượng đi. Các tin nhắn MQTT và/hoặc bản ghi sẽ được mã hóa bằng AES-256. Một khóa riêng tư được sử dụng để ký.",
"encryption_enabled": "Bật mã hóa MQTT",
"description_encryption_enabled": "Bật mã hóa cho toàn bộ tin nhắn MQTT.",
"encryption_recordings_enabled": "Bật mã hóa bản ghi",
"description_encryption_recordings_enabled": "Bật mã hóa cho tất cả các bản ghi.",
"encryption_fingerprint": "Dấu vân tay",
"encryption_privatekey": "Khóa riêng tư",
"encryption_symmetrickey": "Khóa đối xứng"
},
"camera": {
"camera": "Camera",
"description_camera": "Cài đặt camera là bắt buộc để kết nối với camera bạn chọn.",
"only_h264": "Hiện tại chỉ hỗ trợ luồng RTSP H264/H265.",
"rtsp_url": "URL RTSP",
"rtsp_h264": "Kết nối RTSP H264/H265 với camera của bạn.",
"sub_rtsp_url": "URL RTSP phụ (dùng để phát trực tiếp)",
"sub_rtsp_h264": "Kết nối RTSP phụ với độ phân giải thấp của camera.",
"onvif": "ONVIF",
"description_onvif": "Thông tin xác thực để giao tiếp với các chức năng ONVIF. Chúng được sử dụng cho PTZ hoặc các khả năng khác do camera cung cấp.",
"onvif_xaddr": "Địa chỉ ONVIF",
"onvif_username": "Tên người dùng ONVIF",
"onvif_password": "Mật khẩu ONVIF",
"verify_connection": "Xác minh kết nối",
"verify_sub_connection": "Xác minh kết nối phụ"
},
"recording": {
"recording": "Ghi hình",
"description_recording": "Chỉ định cách bạn muốn thực hiện ghi hình. Có thể ghi liên tục 24/7 hoặc dựa trên chuyển động.",
"continuous_recording": "Ghi hình liên tục",
"description_continuous_recording": "Ghi hình liên tục 24/7 hoặc dựa trên chuyển động.",
"max_duration": "Thời lượng video tối đa (giây)",
"description_max_duration": "Thời lượng tối đa của một bản ghi.",
"pre_recording": "Ghi trước (khung hình chính được lưu vào bộ đệm)",
"description_pre_recording": "Số giây trước khi sự kiện xảy ra.",
"post_recording": "Ghi sau (giây)",
"description_post_recording": "Số giây sau khi sự kiện xảy ra.",
"threshold": "Ngưỡng ghi hình (pixel)",
"description_threshold": "Số pixel thay đổi cần đạt để bắt đầu ghi hình.",
"autoclean": "Tự động dọn dẹp",
"description_autoclean": "Chỉ định xem Kerberos Agent có thể dọn dẹp các bản ghi khi dung lượng lưu trữ đạt giới hạn nhất định (MB) hay không. Hệ thống sẽ xóa bản ghi cũ nhất khi đạt giới hạn.",
"autoclean_enable": "Bật tự động dọn dẹp",
"autoclean_description_enable": "Xóa bản ghi cũ nhất khi đạt giới hạn dung lượng.",
"autoclean_max_directory_size": "Dung lượng thư mục tối đa (MB)",
"autoclean_description_max_directory_size": "Dung lượng tối đa (MB) của các bản ghi được lưu trữ.",
"fragmentedrecordings": "Ghi hình phân đoạn",
"description_fragmentedrecordings": "Khi các bản ghi được phân đoạn, chúng phù hợp để phát trực tuyến HLS. Khi bật, định dạng MP4 sẽ có một số khác biệt.",
"fragmentedrecordings_enable": "Bật ghi hình phân đoạn",
"fragmentedrecordings_description_enable": "Ghi hình phân đoạn là bắt buộc đối với HLS.",
"fragmentedrecordings_duration": "Thời lượng phân đoạn",
"fragmentedrecordings_description_duration": "Thời lượng của một phân đoạn duy nhất."
},
"streaming": {
"stun_turn": "STUN/TURN cho WebRTC",
"description_stun_turn": "Để phát trực tiếp độ phân giải đầy đủ, chúng tôi sử dụng khái niệm WebRTC. Một trong những tính năng chính là ICE-candidate, cho phép vượt qua NAT bằng STUN/TURN.",
"stun_server": "Máy chủ STUN",
"turn_server": "Máy chủ TURN",
"turn_username": "Tên người dùng",
"turn_password": "Mật khẩu",
"force_turn": "Buộc sử dụng TURN",
"force_turn_description": "Buộc sử dụng TURN ngay cả khi STUN có sẵn.",
"stun_turn_forward": "Chuyển tiếp và mã hóa",
"stun_turn_description_forward": "Tối ưu hóa và cải thiện giao tiếp TURN/STUN.",
"stun_turn_webrtc": "Chuyển tiếp đến WebRTC broker",
"stun_turn_description_webrtc": "Chuyển tiếp luồng H264 qua MQTT",
"stun_turn_transcode": "Chuyển mã luồng",
"stun_turn_description_transcode": "Chuyển đổi luồng sang độ phân giải thấp hơn",
"stun_turn_downscale": "Giảm độ phân giải (theo % của độ phân giải gốc)",
"mqtt": "MQTT",
"description_mqtt": "Một MQTT broker được sử dụng để giao tiếp từ",
"description2_mqtt": "đến Kerberos Agent, nhằm hỗ trợ phát trực tiếp hoặc chức năng ONVIF (PTZ).",
"mqtt_brokeruri": "Broker Uri",
"mqtt_username": "Tên người dùng",
"mqtt_password": "Mật khẩu",
"realtimeprocessing": "Xử lý thời gian thực",
"description_realtimeprocessing": "Bằng cách bật xử lý thời gian thực, bạn sẽ nhận được các khung hình video thời gian thực qua kết nối MQTT đã chỉ định.",
"realtimeprocessing_topic": "Chủ đề để xuất bản",
"realtimeprocessing_enabled": "Bật xử lý thời gian thực",
"description_realtimeprocessing_enabled": "Gửi khung hình video thời gian thực qua MQTT."
},
"conditions": {
"timeofinterest": "Thời gian quan tâm",
"description_timeofinterest": "Chỉ ghi hình trong các khoảng thời gian cụ thể (dựa trên múi giờ).",
"timeofinterest_enabled": "Đã bật",
"timeofinterest_description_enabled": "Nếu bật, bạn có thể chỉ định các khoảng thời gian ghi hình.",
"sunday": "Chủ nhật",
"monday": "Thứ hai",
"tuesday": "Thứ ba",
"wednesday": "Thứ tư",
"thursday": "Thứ năm",
"friday": "Thứ sáu",
"saturday": "Thứ bảy",
"externalcondition": "Điều kiện bên ngoài",
"description_externalcondition": "Tùy thuộc vào một dịch vụ web bên ngoài, việc ghi hình có thể được bật hoặc tắt.",
"regionofinterest": "Khu vực quan tâm",
"description_regionofinterest": "Bằng cách xác định một hoặc nhiều khu vực, hệ thống sẽ chỉ theo dõi chuyển động trong các khu vực bạn đã chọn."
},
"persistence": {
"kerberoshub": "Kerberos Hub",
"description_kerberoshub": "Các Kerberos Agent có thể gửi tín hiệu nhịp tim đến một hệ thống trung tâm",
"description2_kerberoshub": "để đồng bộ hóa thông tin quan trọng với Kerberos Hub, giúp hiển thị trạng thái giám sát video theo thời gian thực.",
"persistence": "Lưu trữ",
"saasoffering": "Kerberos Hub (dịch vụ SAAS)",
"description_persistence": "Khả năng lưu trữ bản ghi là bước khởi đầu của mọi thứ. Bạn có thể chọn giữa dịch vụ của chúng tôi",
"description2_persistence": "hoặc một nhà cung cấp bên thứ ba.",
"select_persistence": "Chọn phương thức lưu trữ",
"kerberoshub_encryption": "Mã hóa",
"kerberoshub_encryption_description": "Tất cả lưu lượng đến/từ Kerberos Hub sẽ được mã hóa bằng AES-256.",
"kerberoshub_proxyurl": "URL Proxy Kerberos Hub",
"kerberoshub_description_proxyurl": "Điểm cuối Proxy để tải bản ghi lên.",
"kerberoshub_apiurl": "URL API Kerberos Hub",
"kerberoshub_description_apiurl": "Điểm cuối API để tải bản ghi lên.",
"kerberoshub_publickey": "Khóa công khai",
"kerberoshub_description_publickey": "Khóa công khai được cấp cho tài khoản Kerberos Hub của bạn.",
"kerberoshub_privatekey": "Khóa riêng tư",
"kerberoshub_description_privatekey": "Khóa riêng tư được cấp cho tài khoản Kerberos Hub của bạn.",
"kerberoshub_site": "Trang web",
"kerberoshub_description_site": "ID trang web mà các Kerberos Agent thuộc về trong Kerberos Hub.",
"kerberoshub_region": "Khu vực",
"kerberoshub_description_region": "Khu vực nơi chúng tôi lưu trữ bản ghi.",
"kerberoshub_bucket": "Kho lưu trữ",
"kerberoshub_description_bucket": "Kho lưu trữ nơi chúng tôi lưu trữ bản ghi.",
"kerberoshub_username": "Tên người dùng / Thư mục (phải khớp với tên người dùng Kerberos Hub)",
"kerberoshub_description_username": "Tên người dùng tài khoản Kerberos Hub của bạn.",
"kerberosvault_apiurl": "URL API Kerberos Vault",
"kerberosvault_description_apiurl": "API của Kerberos Vault",
"kerberosvault_provider": "Nhà cung cấp",
"kerberosvault_description_provider": "Nhà cung cấp nơi bản ghi của bạn sẽ được gửi đến.",
"kerberosvault_directory": "Thư mục (phải khớp với tên người dùng Kerberos Hub)",
"kerberosvault_description_directory": "Thư mục con nơi các bản ghi sẽ được lưu trữ trong nhà cung cấp của bạn.",
"kerberosvault_accesskey": "Khóa truy cập",
"kerberosvault_description_accesskey": "Khóa truy cập của tài khoản Kerberos Vault của bạn.",
"kerberosvault_secretkey": "Khóa bí mật",
"kerberosvault_description_secretkey": "Khóa bí mật của tài khoản Kerberos Vault của bạn.",
"dropbox_directory": "Thư mục",
"dropbox_description_directory": "Thư mục con nơi bản ghi sẽ được lưu trữ trong tài khoản Dropbox của bạn.",
"dropbox_accesstoken": "Mã truy cập",
"dropbox_description_accesstoken": "Mã truy cập của tài khoản / ứng dụng Dropbox của bạn.",
"verify_connection": "Xác minh kết nối",
"remove_after_upload": "Sau khi bản ghi được tải lên một hệ thống lưu trữ, bạn có thể muốn xóa chúng khỏi Kerberos Agent cục bộ.",
"remove_after_upload_description": "Xóa bản ghi sau khi chúng được tải lên thành công.",
"remove_after_upload_enabled": "Bật xóa sau khi tải lên"
}
}
}

View File

@@ -4,6 +4,7 @@ import {
doVerifyOnvif,
doVerifyHub,
doVerifyPersistence,
doVerifySecondaryPersistence,
doGetKerberosAgentTags,
doGetDashboardInformation,
doGetEvents,
@@ -107,6 +108,28 @@ export const verifyPersistence = (config, onSuccess, onError) => {
};
};
// Thunk action creator: verifies the secondary persistence (Vault) settings
// via the API and dispatches VERIFY_SECONDARY_PERSISTENCE on success.
// `onSuccess` / `onError` are optional callbacks for the calling component.
export const verifySecondaryPersistence = (config, onSuccess, onError) => {
  return (dispatch) => {
    doVerifySecondaryPersistence(
      config,
      () => {
        dispatch({
          type: 'VERIFY_SECONDARY_PERSISTENCE',
        });
        if (onSuccess) {
          onSuccess();
        }
      },
      (error) => {
        // Guard against network-level failures: those errors carry no
        // `response`, so destructuring `error.response.data` directly
        // would throw inside the handler and hide the original error.
        const data = error?.response?.data?.data ?? error?.message;
        if (onError) {
          onError(data);
        }
      }
    );
  };
};
export const verifyHub = (config, onSuccess, onError) => {
return (dispatch) => {
doVerifyHub(

View File

@@ -72,6 +72,25 @@ export function doVerifyPersistence(config, onSuccess, onError) {
});
}
// Calls the secondary-persistence verification endpoint with the given
// config. Invokes `onSuccess` with the response payload on a 200 result,
// otherwise routes the failure (HTTP or network) to `onError`.
export function doVerifySecondaryPersistence(config, onSuccess, onError) {
  API.post(`persistence/secondary/verify`, { ...config })
    .then((res) => {
      if (res.status !== 200) {
        throw new Error(res.data);
      }
      onSuccess(res.data);
    })
    .catch((error) => {
      onError(error);
    });
}
export function doVerifyHub(config, onSuccess, onError) {
const endpoint = API.post(`hub/verify`, {
...config,

View File

@@ -9,9 +9,9 @@ const dev = {
ENV: 'dev',
// Comment the below lines, when using codespaces or other special DNS names (which you can't control)
HOSTNAME: hostname,
API_URL: `${protocol}//${hostname}:80/api`,
URL: `${protocol}//${hostname}:80`,
WS_URL: `${websocketprotocol}//${hostname}:80/ws`,
API_URL: `${protocol}//${hostname}:8080/api`,
URL: `${protocol}//${hostname}:8080`,
WS_URL: `${websocketprotocol}//${hostname}:8080/ws`,
MODE: window['env']['mode'],
// Uncomment, and comment the above lines, when using codespaces or other special DNS names (which you can't control)
// HOSTNAME: externalHost,

View File

@@ -33,6 +33,7 @@ import {
verifyCamera,
verifyHub,
verifyPersistence,
verifySecondaryPersistence,
getConfig,
updateConfig,
} from '../../actions/agent';
@@ -63,6 +64,9 @@ class Settings extends React.Component {
verifyPersistenceSuccess: false,
verifyPersistenceError: false,
verifyPersistenceMessage: '',
verifySecondaryPersistenceSuccess: false,
verifySecondaryPersistenceError: false,
verifySecondaryPersistenceMessage: '',
verifyCameraSuccess: false,
verifyCameraError: false,
verifyCameraMessage: '',
@@ -70,6 +74,7 @@ class Settings extends React.Component {
verifyOnvifError: false,
verifyOnvifErrorMessage: '',
loading: false,
loadingSecondary: false,
loadingHub: false,
loadingCamera: false,
};
@@ -125,6 +130,8 @@ class Settings extends React.Component {
this.onUpdateTimeline = this.onUpdateTimeline.bind(this);
this.initialiseLiveview = this.initialiseLiveview.bind(this);
this.verifyPersistenceSettings = this.verifyPersistenceSettings.bind(this);
this.verifySecondaryPersistenceSettings =
this.verifySecondaryPersistenceSettings.bind(this);
this.verifyHubSettings = this.verifyHubSettings.bind(this);
this.verifyCameraSettings = this.verifyCameraSettings.bind(this);
this.verifySubCameraSettings = this.verifySubCameraSettings.bind(this);
@@ -350,6 +357,8 @@ class Settings extends React.Component {
configSuccess: false,
configError: false,
loadingCamera: false,
loading: false,
loadingSecondary: false,
loadingOnvif: true,
});
@@ -390,6 +399,8 @@ class Settings extends React.Component {
configError: false,
verifyPersistenceSuccess: false,
verifyPersistenceError: false,
verifySecondaryPersistenceSuccess: false,
verifySecondaryPersistenceError: false,
verifyHubSuccess: false,
verifyHubError: false,
verifyHubErrorMessage: '',
@@ -401,6 +412,8 @@ class Settings extends React.Component {
verifyOnvifSuccess: false,
verifyOnvifError: false,
loadingHub: true,
loading: false,
loadingSecondary: false,
});
// .... test fields
@@ -441,6 +454,8 @@ class Settings extends React.Component {
verifyHubError: false,
verifyPersistenceSuccess: false,
verifyPersistenceError: false,
verifySecondaryPersistenceSuccess: false,
verifySecondaryPersistenceError: false,
persistenceSuccess: false,
persistenceError: false,
verifyCameraSuccess: false,
@@ -449,6 +464,7 @@ class Settings extends React.Component {
verifyOnvifError: false,
verifyCameraErrorMessage: '',
loading: true,
loadingSecondary: false,
});
dispatchVerifyPersistence(
@@ -461,6 +477,7 @@ class Settings extends React.Component {
persistenceSuccess: false,
persistenceError: false,
loading: false,
loadingSecondary: false,
});
},
(error) => {
@@ -471,6 +488,58 @@ class Settings extends React.Component {
persistenceSuccess: false,
persistenceError: false,
loading: false,
loadingSecondary: false,
});
}
);
}
}
verifySecondaryPersistenceSettings() {
const { config, dispatchVerifySecondaryPersistence } = this.props;
if (config) {
this.setState({
configSuccess: false,
configError: false,
verifyHubSuccess: false,
verifyHubError: false,
verifyPersistenceSuccess: false,
verifyPersistenceError: false,
verifySecondaryPersistenceSuccess: false,
verifySecondaryPersistenceError: false,
persistenceSuccess: false,
persistenceError: false,
verifyCameraSuccess: false,
verifyCameraError: false,
verifyOnvifSuccess: false,
verifyOnvifError: false,
verifyCameraErrorMessage: '',
loading: false,
loadingSecondary: true,
});
dispatchVerifySecondaryPersistence(
config.config,
() => {
this.setState({
verifySecondaryPersistenceSuccess: true,
verifySecondaryPersistenceError: false,
verifySecondaryPersistenceMessage: '',
persistenceSuccess: false,
persistenceError: false,
loading: false,
loadingSecondary: false,
});
},
(error) => {
this.setState({
verifySecondaryPersistenceSuccess: false,
verifySecondaryPersistenceError: true,
verifySecondaryPersistenceMessage: error,
persistenceSuccess: false,
persistenceError: false,
loading: false,
loadingSecondary: false,
});
}
);
@@ -537,6 +606,9 @@ class Settings extends React.Component {
verifyPersistenceSuccess,
verifyPersistenceError,
verifyPersistenceMessage,
verifySecondaryPersistenceSuccess,
verifySecondaryPersistenceError,
verifySecondaryPersistenceMessage,
verifyCameraSuccess,
verifyCameraError,
verifyCameraErrorMessage,
@@ -546,6 +618,7 @@ class Settings extends React.Component {
verifyOnvifErrorMessage,
loadingCamera,
loading,
loadingSecondary,
loadingHub,
} = this.state;
@@ -798,6 +871,20 @@ class Settings extends React.Component {
)} :${verifyPersistenceMessage}`}
/>
)}
{verifySecondaryPersistenceSuccess && (
<InfoBar
type="success"
message={t('settings.info.verify_persistence_success')}
/>
)}
{verifySecondaryPersistenceError && (
<InfoBar
type="alert"
message={`${t(
'settings.info.verify_persistence_error'
)} :${verifySecondaryPersistenceMessage}`}
/>
)}
<div className="stats grid-container --two-columns">
<div>
{/* General settings block */}
@@ -2449,6 +2536,43 @@ class Settings extends React.Component {
)
}
/>
<Input
noPadding
label={t(
'settings.persistence.kerberosvault_maxretries'
)}
placeholder={t(
'settings.persistence.kerberosvault_description_maxretries'
)}
value={
config.kstorage ? config.kstorage.max_retries : ''
}
onChange={(value) =>
this.onUpdateField(
'kstorage',
'max_retries',
value,
config.kstorage
)
}
/>
<Input
noPadding
label={t('settings.persistence.kerberosvault_timeout')}
placeholder={t(
'settings.persistence.kerberosvault_description_timeout'
)}
value={config.kstorage ? config.kstorage.timeout : ''}
onChange={(value) =>
this.onUpdateField(
'kstorage',
'timeout',
value,
config.kstorage
)
}
/>
</>
)}
{config.cloud === this.DROPBOX && (
@@ -2508,6 +2632,140 @@ class Settings extends React.Component {
</BlockFooter>
</Block>
)}
{/* Secondary Vault block */}
{showPersistenceSection && config.cloud === this.KERBEROS_VAULT && (
<Block>
<BlockHeader>
<h4>{t('settings.persistence.secondary_persistence')}</h4>
</BlockHeader>
<BlockBody>
<p>
{t(
'settings.persistence.description_secondary_persistence'
)}
</p>
<Input
noPadding
label={t('settings.persistence.kerberosvault_apiurl')}
placeholder={t(
'settings.persistence.kerberosvault_description_apiurl'
)}
value={
config.kstorage_secondary
? config.kstorage_secondary.uri
: ''
}
onChange={(value) =>
this.onUpdateField(
'kstorage_secondary',
'uri',
value,
config.kstorage_secondary
)
}
/>
<Input
noPadding
label={t('settings.persistence.kerberosvault_provider')}
placeholder={t(
'settings.persistence.kerberosvault_description_provider'
)}
value={
config.kstorage_secondary
? config.kstorage_secondary.provider
: ''
}
onChange={(value) =>
this.onUpdateField(
'kstorage_secondary',
'provider',
value,
config.kstorage_secondary
)
}
/>
<Input
noPadding
label={t('settings.persistence.kerberosvault_directory')}
placeholder={t(
'settings.persistence.kerberosvault_description_directory'
)}
value={
config.kstorage_secondary
? config.kstorage_secondary.directory
: ''
}
onChange={(value) =>
this.onUpdateField(
'kstorage_secondary',
'directory',
value,
config.kstorage_secondary
)
}
/>
<Input
type="password"
iconright="activity"
label={t('settings.persistence.kerberosvault_accesskey')}
placeholder={t(
'settings.persistence.kerberosvault_description_accesskey'
)}
value={
config.kstorage_secondary
? config.kstorage_secondary.access_key
: ''
}
onChange={(value) =>
this.onUpdateField(
'kstorage_secondary',
'access_key',
value,
config.kstorage_secondary
)
}
/>
<Input
type="password"
iconright="activity"
label={t('settings.persistence.kerberosvault_secretkey')}
placeholder={t(
'settings.persistence.kerberosvault_description_secretkey'
)}
value={
config.kstorage_secondary
? config.kstorage_secondary.secret_access_key
: ''
}
onChange={(value) =>
this.onUpdateField(
'kstorage_secondary',
'secret_access_key',
value,
config.kstorage_secondary
)
}
/>
</BlockBody>
<BlockFooter>
<Button
label={t('settings.persistence.verify_connection')}
disabled={loadingSecondary}
onClick={this.verifySecondaryPersistenceSettings}
type={loadingSecondary ? 'neutral' : 'default'}
icon="verify"
/>
<Button
label="Save"
type="submit"
onClick={this.saveConfig}
buttonType="submit"
icon="pencil"
/>
</BlockFooter>
</Block>
)}
</div>
</div>
</div>
@@ -2532,6 +2790,8 @@ const mapDispatchToProps = (dispatch /* , ownProps */) => ({
dispatch(verifyHub(config, success, error)),
dispatchVerifyPersistence: (config, success, error) =>
dispatch(verifyPersistence(config, success, error)),
dispatchVerifySecondaryPersistence: (config, success, error) =>
dispatch(verifySecondaryPersistence(config, success, error)),
dispatchGetConfig: (callback) => dispatch(getConfig(callback)),
dispatchUpdateConfig: (field, value) => dispatch(updateConfig(field, value)),
dispatchSaveConfig: (config, success, error) =>
@@ -2549,6 +2809,7 @@ Settings.propTypes = {
images: PropTypes.array.isRequired,
dispatchVerifyHub: PropTypes.func.isRequired,
dispatchVerifyPersistence: PropTypes.func.isRequired,
dispatchVerifySecondaryPersistence: PropTypes.func.isRequired,
dispatchGetConfig: PropTypes.func.isRequired,
dispatchUpdateConfig: PropTypes.func.isRequired,
dispatchSaveConfig: PropTypes.func.isRequired,