diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 000000000..c140880df --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,42 @@ +name: Build Docker Image + +on: + workflow_dispatch: + push: + tags: + - "*" + +env: + REGISTRY: ghcr.io + IMAGE: anyshake/observer + +jobs: + build_docker_image: + runs-on: ubuntu-latest + + permissions: + packages: write + contents: read + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Login to Registry + uses: docker/login-action@v1 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and Push + uses: docker/build-push-action@v2 + with: + push: true + file: ./Dockerfile + tags: ${{ env.REGISTRY }}/${{ env.IMAGE }}:latest + + - name: Inspect Image + run: | + docker buildx imagetools inspect \ + ${{ env.REGISTRY }}/${{ env.IMAGE }}:latest diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e53b55a8a..47f7781b2 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -48,7 +48,6 @@ jobs: if: steps.build_docs.outputs.status == 'success' run: | go mod download - cd build make -j$(nproc) echo "::set-output name=status::success" diff --git a/.gitignore b/.gitignore index f85d9ba23..9d21b456b 100644 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,5 @@ _testmain.go *.test *.prof -# AnyShake Observer build files -build/dist* -build/release* +build/dist +frontend/dist diff --git a/CHANGELOG.md b/CHANGELOG.md index bfbd6dd94..a58ed3968 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,31 @@ Starting from v2.2.5, all notable changes to this project will be documented in this file. +## v3.0.0 + +### Breaking Changes + +- **Data Protocol**: The AnyShake Explorer data protocol has been entirely refactored. 
**Please rebuild and flash the firmware of AnyShake Explorer to the latest version.** +- **Configuration File**: The configuration file layout has been completely overhauled. The old configuration file format is no longer supported. +- **SeedLink Server**: The SeedLink service has been temporarily removed and will be re-implemented in a future release. +- **API Endpoints**: Some request and response fields have been modified in API v1. Please refer to the built-in Swagger API documentation for details. + +### New Features + +- Added support for accessing AnyShake Explorer via a serial-to-Ethernet converter. +- Introduced custom channel prefixes (e.g., HH*, SH*, EH*). +- Added log dumping functionality with multiple output levels. +- Enhanced data processing and storage efficiency. +- Improved the accuracy of reading time from the Internet NTP server. +- Refined component lifecycle management using dependency injection for better module decoupling. +- Implemented an asynchronous message bus to optimize application execution efficiency. +- Established a GraphQL-based routing endpoint in preparation for API v2. +- Dockerized the application for easier and faster deployment. + +### Bug Fixes + +- Completely resolved the gap issue in MiniSEED records. 
+ ## v2.12.5 - Fix gaps in MiniSEED records diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..d1c03c9d0 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,22 @@ +FROM golang:alpine AS builder + +RUN apk update && apk add --no-cache git bash wget curl make npm +WORKDIR /build +RUN git clone --progress https://github.com/anyshake/observer.git ./observer && \ + export VERSION=`cat ./observer/VERSION` && \ + cd ./observer/frontend/src && \ + npm install && \ + make && \ + cd ../../docs && \ + make && \ + cd ../cmd && \ + go mod tidy && \ + go build -ldflags "-s -w -X main.version=$VERSION -X main.release=docker_build" -trimpath -o /tmp/observer *.go + +FROM alpine + +COPY --from=builder /tmp/observer /usr/bin/observer +RUN chmod 755 /usr/bin/observer && \ + mkdir -p /etc/observer + +CMD ["observer", "-config=/etc/observer/config.json"] diff --git a/build/Makefile b/Makefile similarity index 61% rename from build/Makefile rename to Makefile index ea26765aa..b7cd6160e 100644 --- a/build/Makefile +++ b/Makefile @@ -1,16 +1,15 @@ -.PHONY: build clean version run windows +.PHONY: build clean gen docs version run windows BINARY=observer -VERSION=$(shell cat ../VERSION) +VERSION=$(shell cat ./VERSION) RELEASE=$(shell date +%Y%m%d%H%M%S) COMMIT=$(shell git rev-parse --short HEAD) -SRC_DIR=../cmd -DIST_DIR=./dist -ASSETS_DIR=./assets +SRC_DIR=./cmd +DIST_DIR=./build/dist +ASSETS_DIR=./build/assets -BUILD_ARCH=arm arm64 386 amd64 ppc64le riscv64 \ - mips mips64le mipsle loong64 s390x +BUILD_ARCH=arm arm64 386 amd64 ppc64le riscv64 loong64 s390x BUILD_FLAGS=-s -w -X main.version=$(VERSION) \ -X main.release=$(COMMIT)-$(RELEASE) BUILD_ARGS=-trimpath @@ -25,16 +24,21 @@ $(BUILD_ARCH): @cp -r $(ASSETS_DIR) $(DIST_DIR)/$@ windows: - @echo "Building Windows 32-bit & 64-bit ..." 
- @mkdir -p $(DIST_DIR)/win32 $(DIST_DIR)/win64 - @rm -rf $(DIST_DIR)/win32/* $(DIST_DIR)/win64/* - @CGO_ENABLED=0 GOOS=windows GOARCH=386 go build -ldflags="$(BUILD_FLAGS)" \ - $(BUILD_ARGS) -o $(DIST_DIR)/win32/$(BINARY).exe $(SRC_DIR)/*.go + @echo "Building Windows 64-bit ..." + @mkdir -p $(DIST_DIR)/win64 + @rm -rf $(DIST_DIR)/win64/* @CGO_ENABLED=0 GOOS=windows GOARCH=amd64 go build -ldflags="$(BUILD_FLAGS)" \ $(BUILD_ARGS) -o $(DIST_DIR)/win64/$(BINARY).exe $(SRC_DIR)/*.go - @cp -r $(ASSETS_DIR) $(DIST_DIR)/win32 @cp -r $(ASSETS_DIR) $(DIST_DIR)/win64 +gen: +ifeq ($(shell command -v gqlgen 2> /dev/null),) + @echo "Installing gqlgen..." + @go get github.com/99designs/gqlgen + @go install github.com/99designs/gqlgen +endif + @gqlgen generate + version: @go run $(SRC_DIR)/*.go --version diff --git a/README.md b/README.md index 745deb9a9..46d0c890b 100644 --- a/README.md +++ b/README.md @@ -15,8 +15,6 @@ AnyShake Observer is an open-source, cross-platform software that can be used to This software is written in Go and TypeScript, which means it can easily port to a variety of OS and CPU architectures, even embedded Linux devices, AnyShake Observer also supports PostgreSQL, MariaDB (MySQL) and SQL Server as seismic data archiving engines. -As of the release of the software documentation, AnyShake has successfully captured more than 40 earthquake events, the furthest captured earthquake event is [M 7.1 - 180 km NNE of Gili Air, Indonesia](https://earthquake.usgs.gov/earthquakes/eventpage/us7000krjx/executive), approximately 4,210 km, by the station located in Chongqing, China. - ## Documentation Please visit [anyshake.org/docs/introduction](https://anyshake.org/docs/introduction) for quick start guide and more information. 
diff --git a/VERSION b/VERSION index 8c118ca02..ad55eb85f 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -v2.12.5 +v3.0.0 diff --git a/app/v1/devel/module.go b/api/v1/devel/module.go similarity index 69% rename from app/v1/devel/module.go rename to api/v1/devel/module.go index 84dd36c31..f9dd6bb14 100644 --- a/app/v1/devel/module.go +++ b/api/v1/devel/module.go @@ -3,22 +3,20 @@ package devel import ( "net/http" - "github.com/anyshake/observer/app" + v1 "github.com/anyshake/observer/api/v1" _ "github.com/anyshake/observer/docs" "github.com/gin-gonic/gin" swaggerFiles "github.com/swaggo/files" gs "github.com/swaggo/gin-swagger" ) -func (d *Devel) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) { - if !options.FeatureOptions.Config.Server.Debug { - return - } - +func (d *Devel) Register(rg *gin.RouterGroup, resolver *v1.Resolver) error { rg.GET("/devel/*any", func(ctx *gin.Context) { if ctx.Param("any") == "/" { url := ctx.Request.URL ctx.Redirect(http.StatusMovedPermanently, url.Path+"/index.html") } }, gs.WrapHandler(swaggerFiles.Handler)) + + return nil } diff --git a/api/v1/devel/name.go b/api/v1/devel/name.go new file mode 100644 index 000000000..17996c53b --- /dev/null +++ b/api/v1/devel/name.go @@ -0,0 +1,5 @@ +package devel + +func (h *Devel) GetApiName() string { + return "devel" +} diff --git a/app/v1/devel/types.go b/api/v1/devel/types.go similarity index 100% rename from app/v1/devel/types.go rename to api/v1/devel/types.go diff --git a/api/v1/history/filter.go b/api/v1/history/filter.go new file mode 100644 index 000000000..6b25e67d4 --- /dev/null +++ b/api/v1/history/filter.go @@ -0,0 +1,46 @@ +package history + +import ( + "fmt" + "time" + + v1 "github.com/anyshake/observer/api/v1" + "github.com/anyshake/observer/drivers/dao/tables" + "github.com/anyshake/observer/drivers/explorer" +) + +func (h *History) filterHistory(startTime, endTime int64, maxDuration time.Duration, resolver *v1.Resolver) ([]explorer.ExplorerData, error) { + if 
endTime-startTime > maxDuration.Milliseconds() { + return nil, fmt.Errorf("duration is too large") + } + + var ( + adcCountModel tables.AdcCount + adcCountData []tables.AdcCount + ) + err := resolver.Database. + Table(adcCountModel.GetName()). + Where("timestamp >= ? AND timestamp <= ?", startTime, endTime). + Order("timestamp ASC"). + Find(&adcCountData). + Error + if err != nil { + return nil, err + } + + var explorerData []explorer.ExplorerData + for _, record := range adcCountData { + explorerData = append(explorerData, explorer.ExplorerData{ + Timestamp: record.Timestamp, + SampleRate: record.SampleRate, + Z_Axis: record.Z_Axis, + E_Axis: record.E_Axis, + N_Axis: record.N_Axis, + }) + } + + if len(explorerData) == 0 { + return nil, fmt.Errorf("no data available for the given time range") + } + return explorerData, nil +} diff --git a/api/v1/history/module.go b/api/v1/history/module.go new file mode 100644 index 000000000..c68e345b6 --- /dev/null +++ b/api/v1/history/module.go @@ -0,0 +1,84 @@ +package history + +import ( + "fmt" + "net/http" + + v1 "github.com/anyshake/observer/api/v1" + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/server/response" + "github.com/anyshake/observer/utils/logger" + "github.com/gin-gonic/gin" +) + +// @Summary AnyShake Observer waveform history +// @Description Get waveform count data in specified time range, channel and format, the maximum duration of the waveform data to be exported is 24 hours for JSON and 1 hour for SAC +// @Router /history [post] +// @Accept application/x-www-form-urlencoded +// @Produce application/json +// @Produce application/octet-stream +// @Param start_time formData int true "Start timestamp of the waveform data to be queried, in milliseconds (unix timestamp)" +// @Param end_time formData int true "End timestamp of the waveform data to be queried, in milliseconds (unix timestamp)" +// @Param format formData string true "Format of the waveform data to be queried, `json` 
or `sac`" +// @Param channel formData string false "Channel of the waveform, `Z`, `E` or `N`, reuqired when format is `sac`" +// @Failure 400 {object} response.HttpResponse "Failed to export waveform data due to invalid format or channel" +// @Failure 410 {object} response.HttpResponse "Failed to export waveform data due to no data available" +// @Failure 500 {object} response.HttpResponse "Failed to export waveform data due to failed to read data source" +// @Success 200 {object} response.HttpResponse{data=[]explorer.ExplorerData} "Successfully exported the waveform data" +func (h *History) Register(rg *gin.RouterGroup, resolver *v1.Resolver) error { + rg.POST("/history", func(c *gin.Context) { + var binding historyBinding + if err := c.ShouldBind(&binding); err != nil { + logger.GetLogger(h.GetApiName()).Errorln(err) + response.Error(c, http.StatusBadRequest) + return + } + + switch binding.Format { + case "json": + result, err := h.filterHistory(binding.StartTime, binding.EndTime, JSON_MAX_DURATION, resolver) + if err != nil { + logger.GetLogger(h.GetApiName()).Errorln(err) + response.Error(c, http.StatusGone) + return + } + response.Message(c, "The waveform data was successfully filtered", result) + return + case "sac": + result, err := h.filterHistory(binding.StartTime, binding.EndTime, SAC_MAX_DURATION, resolver) + if err != nil { + logger.GetLogger(h.GetApiName()).Errorln(err) + response.Error(c, http.StatusGone) + return + } + if binding.Channel != explorer.EXPLORER_CHANNEL_CODE_Z && + binding.Channel != explorer.EXPLORER_CHANNEL_CODE_E && + binding.Channel != explorer.EXPLORER_CHANNEL_CODE_N { + err := fmt.Errorf("no channel was selected") + logger.GetLogger(h.GetApiName()).Errorln(err) + response.Error(c, http.StatusBadRequest) + return + } + fileName, dataBytes, err := h.getSACBytes( + result, + resolver.Config.Stream.Station, + resolver.Config.Stream.Network, + resolver.Config.Stream.Location, + resolver.Config.Stream.Channel, + binding.Channel, + ) + 
if err != nil { + logger.GetLogger(h.GetApiName()).Errorln(err) + response.Error(c, http.StatusInternalServerError) + return + } + + response.File(c, fileName, dataBytes) + return + } + + response.Error(c, http.StatusBadRequest) + }) + + return nil +} diff --git a/api/v1/history/name.go b/api/v1/history/name.go new file mode 100644 index 000000000..1b9098a1d --- /dev/null +++ b/api/v1/history/name.go @@ -0,0 +1,5 @@ +package history + +func (h *History) GetApiName() string { + return "history" +} diff --git a/api/v1/history/sac.go b/api/v1/history/sac.go new file mode 100644 index 000000000..b9f2744b9 --- /dev/null +++ b/api/v1/history/sac.go @@ -0,0 +1,77 @@ +package history + +import ( + "fmt" + "time" + + "github.com/anyshake/observer/drivers/explorer" + "github.com/bclswl0827/sacio" +) + +func (h *History) getSACBytes(data []explorer.ExplorerData, stationCode, networkCode, locationCode, channelPrefix, channelCode string) (string, []byte, error) { + var ( + startSampleRate = data[0].SampleRate + startTimestamp = data[0].Timestamp + startTime = time.UnixMilli(startTimestamp).UTC() + endTime = time.UnixMilli(data[len(data)-1].Timestamp).UTC() + channelName = fmt.Sprintf("%s%s", channelPrefix, channelCode) + ) + + var channelBuffer []int32 + for index, record := range data { + // Make sure timestamp is continuous + if record.Timestamp != startTimestamp+int64(index*1000) { + return "", nil, fmt.Errorf("timestamp is not continuous") + } + + // Make sure sample rate is the same + if record.SampleRate != startSampleRate { + return "", nil, fmt.Errorf("sample rate is not the same") + } + + switch channelCode { + case explorer.EXPLORER_CHANNEL_CODE_Z: + channelBuffer = append(channelBuffer, record.Z_Axis...) + case explorer.EXPLORER_CHANNEL_CODE_E: + channelBuffer = append(channelBuffer, record.E_Axis...) + case explorer.EXPLORER_CHANNEL_CODE_N: + channelBuffer = append(channelBuffer, record.N_Axis...) 
+ } + } + + var sac sacio.SACData + err := sac.Init() + if err != nil { + return "", nil, err + } + sac.SetTime(startTime, endTime.Sub(startTime)) + sac.SetInfo(networkCode, stationCode, locationCode, channelName) + sac.SetBody(h.int32ToFloat32(channelBuffer), startSampleRate) + + // Return filename and bytes (e.g. 2023.193.14.22.51.0317.AS.SHAKE.00.EHZ.D.sac) + filename := fmt.Sprintf("%s.%s.%s.%s.%s.%04d.%s.%s.%s.%s.D.sac", + startTime.Format("2006"), + startTime.Format("002"), + startTime.Format("15"), + startTime.Format("04"), + startTime.Format("05"), + // Get the current millisecond + startTime.Nanosecond()/1000000, + stationCode, networkCode, + locationCode, channelName, + ) + dataBytes, err := sac.Encode(sacio.MSBFIRST) + if err != nil { + return "", nil, err + } + + return filename, dataBytes, nil +} + +func (h *History) int32ToFloat32(arr []int32) []float32 { + floatSlice := make([]float32, len(arr)) + for i, num := range arr { + floatSlice[i] = float32(num) + } + return floatSlice +} diff --git a/api/v1/history/types.go b/api/v1/history/types.go new file mode 100644 index 000000000..d273263b3 --- /dev/null +++ b/api/v1/history/types.go @@ -0,0 +1,18 @@ +package history + +import "time" + +const ( + JSON_MAX_DURATION = time.Hour // The maximum duration of the JSON data to be exported + SAC_MAX_DURATION = time.Hour // The maximum duration of the SAC data to be exported + THRESHOLD = time.Minute // There are uneven gaps between the data if time difference is greater than THRESHOLD +) + +type History struct{} + +type historyBinding struct { + StartTime int64 `form:"start_time" json:"start_time" xml:"start_time" binding:"required,numeric"` + EndTime int64 `form:"end_time" json:"end_time" xml:"end_time" binding:"required,numeric"` + Format string `form:"format" json:"format" xml:"format" binding:"required,oneof=json sac"` + Channel string `form:"channel" json:"channel" xml:"channel" binding:"omitempty"` +} diff --git a/app/v1/inventory/module.go 
b/api/v1/inventory/module.go similarity index 60% rename from app/v1/inventory/module.go rename to api/v1/inventory/module.go index 115abd1de..dd21e895d 100644 --- a/app/v1/inventory/module.go +++ b/api/v1/inventory/module.go @@ -3,8 +3,10 @@ package inventory import ( "net/http" - "github.com/anyshake/observer/app" + v1 "github.com/anyshake/observer/api/v1" + "github.com/anyshake/observer/drivers/explorer" "github.com/anyshake/observer/server/response" + "github.com/anyshake/observer/utils/logger" "github.com/gin-gonic/gin" ) @@ -15,15 +17,25 @@ import ( // @Produce application/json // @Success 200 {object} response.HttpResponse{data=string} "Successfully get SeisComP XML inventory" // @Produce application/xml -func (i *Inventory) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) { +func (i *Inventory) Register(rg *gin.RouterGroup, resolver *v1.Resolver) error { + var explorerDeps *explorer.ExplorerDependency + err := resolver.Dependency.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + return err + } + rg.GET("/inventory", func(c *gin.Context) { - var binding Binding + var binding inventoryBinding if err := c.ShouldBind(&binding); err != nil { + logger.GetLogger(i.GetApiName()).Errorln(err) response.Error(c, http.StatusBadRequest) return } - inventory := getInventoryString(options.FeatureOptions.Config, options.FeatureOptions.Status) + inventory := i.getInventoryString(resolver.Config, explorerDeps) if binding.Format == "json" { response.Message(c, "Successfully get SeisComP XML inventory", inventory) return @@ -31,4 +43,6 @@ func (i *Inventory) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptio c.Data(http.StatusOK, "application/xml", []byte(inventory)) }) + + return nil } diff --git a/api/v1/inventory/name.go b/api/v1/inventory/name.go new file mode 100644 index 000000000..71ff43bc6 --- /dev/null +++ b/api/v1/inventory/name.go @@ -0,0 +1,5 @@ +package inventory + +func (h 
*Inventory) GetApiName() string { + return "inventory" +} diff --git a/app/v1/inventory/template.go b/api/v1/inventory/template.go similarity index 72% rename from app/v1/inventory/template.go rename to api/v1/inventory/template.go index 7250ef278..769dc51d6 100644 --- a/app/v1/inventory/template.go +++ b/api/v1/inventory/template.go @@ -5,10 +5,10 @@ import ( "math" "github.com/anyshake/observer/config" - "github.com/anyshake/observer/publisher" + "github.com/anyshake/observer/drivers/explorer" ) -func getInventoryString(config *config.Conf, status *publisher.Status) string { +func (i *Inventory) getInventoryString(config *config.Config, explorerDeps *explorer.ExplorerDependency) string { const xmlTemplate = ` @@ -105,40 +105,34 @@ func getInventoryString(config *config.Conf, status *publisher.Status) string { ` - startTime := status.ReadyTime - currentSampleRate := (len(status.Geophone.EHZ) + len(status.Geophone.EHE) + len(status.Geophone.EHN)) / 3 - if startTime.IsZero() || currentSampleRate == 0 { - return "" - } - - sensorHighFrequency := currentSampleRate / 2 - dataloggerGain := math.Pow(2, float64(config.ADC.Resolution-1)) / config.ADC.FullScale - dataloggerSampleRateNumerator := currentSampleRate - responsePAZGain := config.Geophone.Sensitivity - responsePAZGainFrequency := config.Geophone.Frequency - responsePAZGainNormalizationFrequency := config.Geophone.Frequency - networkCode := config.Station.Network - networkStart := status.ReadyTime.UTC().Format("2006-01-02T15:04:05.0000Z") + sensorHighFrequency := explorerDeps.Health.SampleRate / 2 + dataloggerGain := math.Pow(2, float64(config.Sensor.Resolution-1)) / config.Sensor.FullScale + dataloggerSampleRateNumerator := explorerDeps.Health.SampleRate + responsePAZGain := config.Sensor.Sensitivity + responsePAZGainFrequency := config.Sensor.Frequency + responsePAZGainNormalizationFrequency := config.Sensor.Frequency + networkCode := config.Stream.Network + networkStart := 
explorerDeps.Health.StartTime.UTC().Format("2006-01-02T15:04:05.0000Z") networkRegion := config.Station.Region - stationCode := config.Station.Station - stationStart := status.ReadyTime.UTC().Format("2006-01-02T15:04:05.0000Z") - stationDescription := fmt.Sprintf("AnyShake Station %s", config.Station.UUID) - stationLatitude := config.Station.Latitude - stationLongitude := config.Station.Longitude - stationElevation := config.Station.Elevation + stationCode := config.Stream.Station + stationStart := explorerDeps.Health.StartTime.UTC().Format("2006-01-02T15:04:05.0000Z") + stationDescription := fmt.Sprintf("AnyShake Station in %s", config.Station.City) + stationLatitude := config.Location.Latitude + stationLongitude := config.Location.Longitude + stationElevation := config.Location.Elevation stationCity := config.Station.City stationCountry := config.Station.Country stationAffiliation := config.Station.Owner - sensorLocationCode := config.Station.Location - sensorLocationStart := status.ReadyTime.UTC().Format("2006-01-02T15:04:05.0000Z") - sensorLocationLatitude := config.Station.Latitude - sensorLocationLongitude := config.Station.Longitude - sensorLocationElevation := config.Station.Elevation + sensorLocationCode := config.Stream.Location + sensorLocationStart := explorerDeps.Health.StartTime.UTC().Format("2006-01-02T15:04:05.0000Z") + sensorLocationLatitude := config.Location.Latitude + sensorLocationLongitude := config.Location.Longitude + sensorLocationElevation := config.Location.Elevation // Stream settings - streamStart := status.ReadyTime.UTC().Format("2006-01-02T15:04:05.0000Z") - streamSampleRateNumerator := currentSampleRate - streamGain := dataloggerGain * config.Geophone.Sensitivity + streamStart := explorerDeps.Health.StartTime.UTC().Format("2006-01-02T15:04:05.0000Z") + streamSampleRateNumerator := explorerDeps.Health.SampleRate + streamGain := dataloggerGain * config.Sensor.Sensitivity return fmt.Sprintf( xmlTemplate, diff --git 
a/app/v1/inventory/types.go b/api/v1/inventory/types.go similarity index 77% rename from app/v1/inventory/types.go rename to api/v1/inventory/types.go index c590d0f6d..e27caafc1 100644 --- a/app/v1/inventory/types.go +++ b/api/v1/inventory/types.go @@ -2,6 +2,6 @@ package inventory type Inventory struct{} -type Binding struct { +type inventoryBinding struct { Format string `form:"format" json:"format" xml:"format"` } diff --git a/app/v1/mseed/export.go b/api/v1/mseed/export.go similarity index 60% rename from app/v1/mseed/export.go rename to api/v1/mseed/export.go index 057e96585..d9a7885aa 100644 --- a/app/v1/mseed/export.go +++ b/api/v1/mseed/export.go @@ -3,18 +3,14 @@ package mseed import ( "fmt" "os" + "path/filepath" "strings" - - "github.com/anyshake/observer/config" ) -func getMiniSEEDBytes(conf *config.Conf, fileName string) ([]byte, error) { - // Remove slash in file name to avoid path traversal - fileName = strings.ReplaceAll(fileName, "\\", "") +func (m *MSeed) getMiniSeedBytes(basePath, fileName string) ([]byte, error) { + fileName = filepath.Clean(fileName) fileName = strings.ReplaceAll(fileName, "/", "") - basePath := conf.MiniSEED.Path - // Check if file exists, return nil if not exists to avoid 500 error filePath := fmt.Sprintf("%s/%s", basePath, fileName) if _, err := os.Stat(filePath); os.IsNotExist(err) { return nil, nil diff --git a/app/v1/mseed/list.go b/api/v1/mseed/list.go similarity index 51% rename from app/v1/mseed/list.go rename to api/v1/mseed/list.go index a1cfcfcf5..0c2d66a2e 100644 --- a/app/v1/mseed/list.go +++ b/api/v1/mseed/list.go @@ -6,40 +6,32 @@ import ( "path/filepath" "strings" "time" - - "github.com/anyshake/observer/config" ) -func getMiniSEEDList(conf *config.Conf) ([]MiniSEEDFile, error) { - var ( - basePath = conf.MiniSEED.Path - station = conf.Station.Station - network = conf.Station.Network - LifeCycle = conf.MiniSEED.LifeCycle - ) - - var files []MiniSEEDFile +func (m *MSeed) getMiniSeedList(basePath, stationCode, 
networkCode string, lifeCycle int) ([]miniSeedFileInfo, error) { + var files []miniSeedFileInfo walkFn := func(path string, info os.FileInfo, err error) error { if err != nil { return err } if !info.IsDir() && filepath.Ext(info.Name()) == ".mseed" && - strings.Contains(info.Name(), station) && - strings.Contains(info.Name(), network) { + strings.Contains(info.Name(), stationCode) && + strings.Contains(info.Name(), networkCode) { modTime := info.ModTime().UTC() + // Calculate file TTL var fileTTL int - if LifeCycle > 0 { - fileTTL = LifeCycle - int(time.Since(modTime).Hours()/24) + if lifeCycle > 0 { + fileTTL = lifeCycle - int(time.Since(modTime).Hours()/24) } else { fileTTL = -1 } - files = append(files, MiniSEEDFile{ + files = append(files, miniSeedFileInfo{ TTL: fileTTL, Name: info.Name(), - Time: modTime.UnixMilli(), + Time: modTime.UTC().UnixMilli(), Size: fmt.Sprintf("%d MB", info.Size()/1024/1024), }) } diff --git a/app/v1/mseed/module.go b/api/v1/mseed/module.go similarity index 58% rename from app/v1/mseed/module.go rename to api/v1/mseed/module.go index e5ac98e69..55ff1ac6a 100644 --- a/app/v1/mseed/module.go +++ b/api/v1/mseed/module.go @@ -1,10 +1,13 @@ package mseed import ( + "errors" "net/http" - "github.com/anyshake/observer/app" + v1 "github.com/anyshake/observer/api/v1" "github.com/anyshake/observer/server/response" + "github.com/anyshake/observer/services/miniseed" + "github.com/anyshake/observer/utils/logger" "github.com/gin-gonic/gin" ) @@ -19,27 +22,38 @@ import ( // @Failure 400 {object} response.HttpResponse "Failed to list or export MiniSEED data due to invalid request body" // @Failure 410 {object} response.HttpResponse "Failed to export MiniSEED data due to invalid file name or permission denied" // @Failure 500 {object} response.HttpResponse "Failed to list or export MiniSEED data due to internal server error" -// @Success 200 {object} response.HttpResponse{data=[]MiniSEEDFile} "Successfully get list of MiniSEED files" -func (h *MSeed) 
RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) { +// @Success 200 {object} response.HttpResponse{data=[]miniSeedFileInfo} "Successfully get list of MiniSEED files" +func (h *MSeed) Register(rg *gin.RouterGroup, resolver *v1.Resolver) error { + // Get MiniSEED service configuration + var miniseedService miniseed.MiniSeedService + serviceConfig, ok := resolver.Config.Services[miniseedService.GetServiceName()] + if !ok { + return errors.New("failed to get configuration for MiniSEED service") + } + basePath := serviceConfig.(map[string]any)["path"].(string) + lifeCycle := int(serviceConfig.(map[string]any)["lifecycle"].(float64)) + rg.POST("/mseed", func(c *gin.Context) { - var binding Binding + var binding mseedBinding if err := c.ShouldBind(&binding); err != nil { + logger.GetLogger(h.GetApiName()).Errorln(err) response.Error(c, http.StatusBadRequest) return } if binding.Action == "show" { - fileList, err := getMiniSEEDList(options.FeatureOptions.Config) + fileList, err := h.getMiniSeedList( + basePath, + resolver.Config.Stream.Station, + resolver.Config.Stream.Network, + lifeCycle, + ) if err != nil { + logger.GetLogger(h.GetApiName()).Errorln(err) response.Error(c, http.StatusInternalServerError) return } - if len(fileList) == 0 { - response.Error(c, http.StatusGone) - return - } - response.Message(c, "Successfully get MiniSEED file list", fileList) return } @@ -49,17 +63,15 @@ func (h *MSeed) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) return } - fileBytes, err := getMiniSEEDBytes(options.FeatureOptions.Config, binding.Name) + fileBytes, err := h.getMiniSeedBytes(basePath, binding.Name) if err != nil { + logger.GetLogger(h.GetApiName()).Errorln(err) response.Error(c, http.StatusInternalServerError) return } - if len(fileBytes) == 0 { - response.Error(c, http.StatusGone) - return - } - response.File(c, binding.Name, fileBytes) }) + + return nil } diff --git a/api/v1/mseed/name.go b/api/v1/mseed/name.go new file mode 100644 index 
000000000..3d0916a41 --- /dev/null +++ b/api/v1/mseed/name.go @@ -0,0 +1,5 @@ +package mseed + +func (m *MSeed) GetApiName() string { + return "mseed" +} diff --git a/app/v1/mseed/types.go b/api/v1/mseed/types.go similarity index 85% rename from app/v1/mseed/types.go rename to api/v1/mseed/types.go index 255de9db3..b7ec83cb1 100644 --- a/app/v1/mseed/types.go +++ b/api/v1/mseed/types.go @@ -2,12 +2,12 @@ package mseed type MSeed struct{} -type Binding struct { +type mseedBinding struct { Action string `form:"action" json:"action" xml:"action" binding:"required,oneof=export show"` Name string `form:"name" json:"name" xml:"name" binding:"omitempty,endswith=.mseed"` } -type MiniSEEDFile struct { +type miniSeedFileInfo struct { TTL int `json:"ttl"` Time int64 `json:"time"` Size string `json:"size"` diff --git a/api/v1/socket/module.go b/api/v1/socket/module.go new file mode 100644 index 000000000..4033a509c --- /dev/null +++ b/api/v1/socket/module.go @@ -0,0 +1,110 @@ +package socket + +import ( + "encoding/json" + "net/http" + "time" + + v1 "github.com/anyshake/observer/api/v1" + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/server/response" + "github.com/anyshake/observer/utils/logger" + "github.com/gin-gonic/gin" + "github.com/gorilla/websocket" + cmap "github.com/orcaman/concurrent-map/v2" + messagebus "github.com/vardius/message-bus" +) + +func (s *Socket) Register(rg *gin.RouterGroup, resolver *v1.Resolver) error { + s.subscribers = cmap.New[explorer.ExplorerEventHandler]() + s.messageBus = messagebus.New(65535) + + // Forward events to internal message bus + var explorerDeps *explorer.ExplorerDependency + err := resolver.Dependency.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + return err + } + explorerDriver := explorer.ExplorerDriver(&explorer.ExplorerDriverImpl{}) + explorerDriver.Subscribe( + explorerDeps, + 
s.GetApiName(), + func(data *explorer.ExplorerData) { + s.messageBus.Publish(s.GetApiName(), data) + s.historyBuffer[s.historyBufferIndex] = *data + s.historyBufferIndex = (s.historyBufferIndex + 1) % EXPLORER_BUFFER_SIZE + }, + ) + + rg.GET("/socket", func(c *gin.Context) { + var upgrader = websocket.Upgrader{ + ReadBufferSize: 1024, WriteBufferSize: 1024, EnableCompression: true, + Error: func(w http.ResponseWriter, r *http.Request, status int, reason error) { + logger.GetLogger(s.GetApiName()).Errorf("websocket error, code %d, %s", status, reason) + response.Error(c, http.StatusBadRequest) + }, + CheckOrigin: func(r *http.Request) bool { return true }, + } + + conn, err := upgrader.Upgrade(c.Writer, c.Request, nil) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + return + } + defer conn.Close() + + // Send history buffer to the client + for _, buffer := range s.historyBuffer { + if buffer.Timestamp == 0 { + continue + } + dataBytes, err := json.Marshal(buffer) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + return + } + err = conn.WriteMessage(websocket.TextMessage, dataBytes) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + return + } + // To prevent flooding the client + time.Sleep(time.Millisecond * 10) + } + + // Subscribe to the internal message bus + clienrId := conn.RemoteAddr().String() + handler := func(data *explorer.ExplorerData) { + dataBytes, err := json.Marshal(data) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + return + } + err = conn.WriteMessage(websocket.TextMessage, dataBytes) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + return + } + } + err = s.subscribe(clienrId, handler) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + return + } + defer s.Unsubscribe(clienrId) + + // Listen for incoming messages + for { + _, _, err := conn.ReadMessage() + if err != nil { + return + } + } + }) + + return nil +} diff --git 
a/api/v1/socket/name.go b/api/v1/socket/name.go new file mode 100644 index 000000000..5d7f920c3 --- /dev/null +++ b/api/v1/socket/name.go @@ -0,0 +1,5 @@ +package socket + +func (s *Socket) GetApiName() string { + return "socket" +} diff --git a/api/v1/socket/subscribe.go b/api/v1/socket/subscribe.go new file mode 100644 index 000000000..b5d198dc2 --- /dev/null +++ b/api/v1/socket/subscribe.go @@ -0,0 +1,16 @@ +package socket + +import ( + "errors" + + "github.com/anyshake/observer/drivers/explorer" +) + +func (s *Socket) subscribe(clientId string, handler explorer.ExplorerEventHandler) error { + if _, ok := s.subscribers.Get(clientId); ok { + return errors.New("this client has already subscribed") + } + s.subscribers.Set(clientId, handler) + s.messageBus.Subscribe(s.GetApiName(), handler) + return nil +} diff --git a/api/v1/socket/types.go b/api/v1/socket/types.go new file mode 100644 index 000000000..6afd2ee9a --- /dev/null +++ b/api/v1/socket/types.go @@ -0,0 +1,16 @@ +package socket + +import ( + "github.com/anyshake/observer/drivers/explorer" + cmap "github.com/orcaman/concurrent-map/v2" + messagebus "github.com/vardius/message-bus" +) + +const EXPLORER_BUFFER_SIZE = 180 + +type Socket struct { + messageBus messagebus.MessageBus // An independent message bus for the socket module + subscribers cmap.ConcurrentMap[string, explorer.ExplorerEventHandler] + historyBuffer [EXPLORER_BUFFER_SIZE]explorer.ExplorerData + historyBufferIndex int +} diff --git a/api/v1/socket/unsubscribe.go b/api/v1/socket/unsubscribe.go new file mode 100644 index 000000000..4b32c3b6f --- /dev/null +++ b/api/v1/socket/unsubscribe.go @@ -0,0 +1,15 @@ +package socket + +import ( + "errors" +) + +func (s *Socket) Unsubscribe(clientId string) error { + fn, ok := s.subscribers.Get(clientId) + if !ok { + return errors.New("this client has not subscribed") + } + s.messageBus.Unsubscribe(s.GetApiName(), fn) + s.subscribers.Remove(clientId) + return nil +} diff --git a/app/v1/station/cpu.go 
b/api/v1/station/cpu.go similarity index 61% rename from app/v1/station/cpu.go rename to api/v1/station/cpu.go index 308d12dc7..0ed235547 100644 --- a/app/v1/station/cpu.go +++ b/api/v1/station/cpu.go @@ -4,7 +4,7 @@ import ( "github.com/shirou/gopsutil/cpu" ) -func getCPU() cpuModel { +func (c *cpuInfo) get() error { eachCoreInfo, _ := cpu.Info() eachPercent, _ := cpu.Percent(0, true) @@ -14,14 +14,12 @@ func getCPU() cpuModel { for _, v := range eachPercent { totalPercent += v } - return cpuModel{ - Model: currentModel, - Percent: totalPercent / float64(len(eachPercent)), - } + c.Model = currentModel + c.Percent = totalPercent / float64(len(eachPercent)) } else { - return cpuModel{ - Model: "Unknown", - Percent: 0, - } + c.Model = "Unknown" + c.Percent = 0 } + + return nil } diff --git a/api/v1/station/disk.go b/api/v1/station/disk.go new file mode 100644 index 000000000..ec64b61bb --- /dev/null +++ b/api/v1/station/disk.go @@ -0,0 +1,25 @@ +package station + +import ( + "os" + + "github.com/shirou/gopsutil/disk" +) + +func (d *diskInfo) get() error { + cwd, err := os.Getwd() + if err != nil { + return err + } + + usage, err := disk.Usage(cwd) + if err != nil { + return err + } + + d.Total = usage.Total + d.Free = usage.Free + d.Used = usage.Used + d.Percent = usage.UsedPercent + return nil +} diff --git a/api/v1/station/explorer.go b/api/v1/station/explorer.go new file mode 100644 index 000000000..0a997160f --- /dev/null +++ b/api/v1/station/explorer.go @@ -0,0 +1,25 @@ +package station + +import ( + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/utils/timesource" +) + +func (e *explorerInfo) get(timeSource timesource.Source, explorerDeps *explorer.ExplorerDependency) error { + e.DeviceId = explorerDeps.Config.DeviceId + e.Elevation = explorerDeps.Config.Elevation + e.Errors = explorerDeps.Health.Errors + e.Received = explorerDeps.Health.Received + e.SampleRate = explorerDeps.Health.SampleRate + + currentTime, err := 
timeSource.GetTime() + if err != nil { + return err + } + e.Elapsed = int64(currentTime.Sub(explorerDeps.Health.StartTime).Seconds()) + + e.Latitude = float64(int(explorerDeps.Config.Latitude*1000)) / 1000 + e.Longitude = float64(int(explorerDeps.Config.Longitude*1000)) / 1000 + + return nil +} diff --git a/api/v1/station/memory.go b/api/v1/station/memory.go new file mode 100644 index 000000000..b8036f432 --- /dev/null +++ b/api/v1/station/memory.go @@ -0,0 +1,16 @@ +package station + +import "github.com/shirou/gopsutil/mem" + +func (m *memoryInfo) get() error { + vmStat, err := mem.VirtualMemory() + if err != nil { + return err + } + + m.Total = vmStat.Total + m.Free = vmStat.Free + m.Used = vmStat.Used + m.Percent = vmStat.UsedPercent + return nil +} diff --git a/api/v1/station/module.go b/api/v1/station/module.go new file mode 100644 index 000000000..4bfe07413 --- /dev/null +++ b/api/v1/station/module.go @@ -0,0 +1,77 @@ +package station + +import ( + "net/http" + + v1 "github.com/anyshake/observer/api/v1" + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/server/response" + "github.com/anyshake/observer/utils/logger" + "github.com/gin-gonic/gin" +) + +// @Summary AnyShake Observer station status +// @Description Get Observer station status including system information, memory usage, disk usage, CPU usage, ADC information, geophone information, and location information +// @Router /station [get] +// @Produce application/json +// @Success 200 {object} response.HttpResponse{data=stationInfo} "Successfully read station information" +func (s *Station) Register(rg *gin.RouterGroup, resolver *v1.Resolver) error { + var explorerDeps *explorer.ExplorerDependency + err := resolver.Dependency.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + return err + } + + rg.GET("/station", func(c *gin.Context) { + var explorer explorerInfo + err := explorer.get(resolver.TimeSource, 
explorerDeps) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + response.Error(c, http.StatusInternalServerError) + return + } + var cpu cpuInfo + err = cpu.get() + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + response.Error(c, http.StatusInternalServerError) + return + } + var disk diskInfo + err = disk.get() + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + response.Error(c, http.StatusInternalServerError) + return + } + var memory memoryInfo + err = memory.get() + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + response.Error(c, http.StatusInternalServerError) + return + } + var os osInfo + err = os.get(resolver.TimeSource) + if err != nil { + logger.GetLogger(s.GetApiName()).Errorln(err) + response.Error(c, http.StatusInternalServerError) + return + } + response.Message(c, "Successfully read station information", stationInfo{ + Station: resolver.Config.Station, + Stream: resolver.Config.Stream, + Sensor: resolver.Config.Sensor, + Explorer: explorer, + CPU: cpu, + Disk: disk, + Memory: memory, + OS: os, + }) + }) + + return nil +} diff --git a/api/v1/station/name.go b/api/v1/station/name.go new file mode 100644 index 000000000..3e68a663e --- /dev/null +++ b/api/v1/station/name.go @@ -0,0 +1,5 @@ +package station + +func (s *Station) GetApiName() string { + return "station" +} diff --git a/api/v1/station/os.go b/api/v1/station/os.go new file mode 100644 index 000000000..3f7a666a8 --- /dev/null +++ b/api/v1/station/os.go @@ -0,0 +1,35 @@ +package station + +import ( + "os" + "runtime" + + "github.com/anyshake/observer/utils/timesource" + "github.com/mackerelio/go-osstat/uptime" + "github.com/wille/osutil" +) + +func (o *osInfo) get(timeSource timesource.Source) error { + hostname, err := os.Hostname() + if err != nil { + return err + } + + up, err := uptime.Get() + if err != nil { + return err + } + + timestamp, err := timeSource.GetTime() + if err != nil { + return err + } + + o.Uptime = 
int64(up.Seconds()) + o.OS = runtime.GOOS + o.Arch = runtime.GOARCH + o.Distro = osutil.Name + o.Hostname = hostname + o.Timestamp = timestamp.UnixMilli() + return nil +} diff --git a/api/v1/station/types.go b/api/v1/station/types.go new file mode 100644 index 000000000..50e30fc54 --- /dev/null +++ b/api/v1/station/types.go @@ -0,0 +1,57 @@ +package station + +import ( + "github.com/anyshake/observer/config" +) + +type Station struct{} + +type explorerInfo struct { + Elapsed int64 `json:"elapsed"` + Errors int64 `json:"errors"` + Received int64 `json:"received"` + SampleRate int `json:"sample_rate"` + DeviceId uint32 `json:"device_id"` + Latitude float64 `json:"latitude"` + Longitude float64 `json:"longitude"` + Elevation float64 `json:"elevation"` +} + +type memoryInfo struct { + Total uint64 `json:"total"` + Free uint64 `json:"free"` + Used uint64 `json:"used"` + Percent float64 `json:"percent"` +} + +type diskInfo struct { + Total uint64 `json:"total"` + Free uint64 `json:"free"` + Used uint64 `json:"used"` + Percent float64 `json:"percent"` +} + +type osInfo struct { + Uptime int64 `json:"uptime"` + OS string `json:"os"` + Arch string `json:"arch"` + Distro string `json:"distro"` + Hostname string `json:"hostname"` + Timestamp int64 `json:"timestamp"` +} + +type cpuInfo struct { + Model string `json:"model"` + Percent float64 `json:"percent"` +} + +type stationInfo struct { + Explorer explorerInfo `json:"explorer"` + Station config.Station `json:"station"` + Stream config.Stream `json:"stream"` + Sensor config.Sensor `json:"sensor"` + CPU cpuInfo `json:"cpu"` + Disk diskInfo `json:"disk"` + Memory memoryInfo `json:"memory"` + OS osInfo `json:"os"` +} diff --git a/app/v1/trace/cea.go b/api/v1/trace/cea.go similarity index 91% rename from app/v1/trace/cea.go rename to api/v1/trace/cea.go index 70c0ae0ad..1b39f9981 100644 --- a/app/v1/trace/cea.go +++ b/api/v1/trace/cea.go @@ -8,12 +8,11 @@ import ( "time" "github.com/PuerkitoBio/goquery" - 
"github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) type CEA_DASE struct { - DataSourceCache + dataSourceCache } func (c *CEA_DASE) Property() string { @@ -21,7 +20,7 @@ func (c *CEA_DASE) Property() string { } func (c *CEA_DASE) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), c.Time) <= EXPIRATION { + if time.Since(c.Time) <= EXPIRATION { return c.Cache, nil } @@ -77,10 +76,10 @@ func (c *CEA_DASE) Parse(data []byte) (map[string]any, error) { return result, nil } -func (c *CEA_DASE) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { - var list []Event +func (c *CEA_DASE) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { + var list []seismicEvent for _, v := range data["data"].([]any) { - l := Event{ + l := seismicEvent{ Verfied: false, Latitude: v.(map[string]any)["latitude"].(float64), Longitude: v.(map[string]any)["longitude"].(float64), @@ -91,7 +90,7 @@ func (c *CEA_DASE) Format(latitude, longitude float64, data map[string]any) ([]E Magnitude: v.(map[string]any)["magnitude"].(float64), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -99,7 +98,7 @@ func (c *CEA_DASE) Format(latitude, longitude float64, data map[string]any) ([]E return list, nil } -func (c *CEA_DASE) List(latitude, longitude float64) ([]Event, error) { +func (c *CEA_DASE) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := c.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/ceic.go b/api/v1/trace/ceic.go similarity index 85% rename from app/v1/trace/ceic.go rename to api/v1/trace/ceic.go index 97463e390..7890177bf 100644 --- a/app/v1/trace/ceic.go +++ b/api/v1/trace/ceic.go @@ -4,12 +4,11 @@ import ( "encoding/json" "time" - "github.com/anyshake/observer/utils/duration" 
"github.com/anyshake/observer/utils/request" ) type CEIC struct { - DataSourceCache + dataSourceCache } func (c *CEIC) Property() string { @@ -17,7 +16,7 @@ func (c *CEIC) Property() string { } func (c *CEIC) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), c.Time) <= EXPIRATION { + if time.Since(c.Time) <= EXPIRATION { return c.Cache, nil } @@ -50,12 +49,12 @@ func (c *CEIC) Parse(data []byte) (map[string]any, error) { return result, nil } -func (c *CEIC) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { +func (c *CEIC) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { keys := []string{"O_TIME", "EPI_LAT", "EPI_LON", "EPI_DEPTH", "M", "LOCATION_C"} - var list []Event + var list []seismicEvent for _, v := range data["data"].([]map[string]any) { - if !hasKey(v, keys) || !isEmpty(v, keys) { + if !isMapHasKeys(v, keys) || !isMapKeysEmpty(v, keys) { continue } @@ -75,7 +74,7 @@ func (c *CEIC) Format(latitude, longitude float64, data map[string]any) ([]Event depth = -1 } - l := Event{ + l := seismicEvent{ Depth: depth, Verfied: true, Timestamp: ts.Add(-8 * time.Hour).UnixMilli(), @@ -86,7 +85,7 @@ func (c *CEIC) Format(latitude, longitude float64, data map[string]any) ([]Event Magnitude: string2Float(v["M"].(string)), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -94,7 +93,7 @@ func (c *CEIC) Format(latitude, longitude float64, data map[string]any) ([]Event return list, nil } -func (c *CEIC) List(latitude, longitude float64) ([]Event, error) { +func (c *CEIC) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := c.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/cwa.go b/api/v1/trace/cwa.go similarity index 93% rename from app/v1/trace/cwa.go rename to api/v1/trace/cwa.go index 832e10b63..bc4df8c0c 
100644 --- a/app/v1/trace/cwa.go +++ b/api/v1/trace/cwa.go @@ -11,14 +11,13 @@ import ( "time" "github.com/PuerkitoBio/goquery" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) const HOST_IP_TO_BYPASS_GFW = "168.95.246.1:443" type CWA struct { - DataSourceCache + dataSourceCache } func (c *CWA) Property() string { @@ -34,7 +33,7 @@ func (c *CWA) createGFWBypasser() *http.Transport { } func (c *CWA) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), c.Time) <= EXPIRATION { + if time.Since(c.Time) <= EXPIRATION { return c.Cache, nil } @@ -92,10 +91,10 @@ func (c *CWA) Parse(data []byte) (map[string]any, error) { return result, nil } -func (c *CWA) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { - var list []Event +func (c *CWA) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { + var list []seismicEvent for _, v := range data["data"].([]any) { - l := Event{ + l := seismicEvent{ Verfied: true, Latitude: string2Float(v.(map[string]any)["latitude"].(string)), Longitude: string2Float(v.(map[string]any)["longitude"].(string)), @@ -106,7 +105,7 @@ func (c *CWA) Format(latitude, longitude float64, data map[string]any) ([]Event, Magnitude: v.(map[string]any)["magnitude"].(float64), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -114,7 +113,7 @@ func (c *CWA) Format(latitude, longitude float64, data map[string]any) ([]Event, return list, nil } -func (c *CWA) List(latitude, longitude float64) ([]Event, error) { +func (c *CWA) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := c.Fetch() if err != nil { return nil, err diff --git a/api/v1/trace/data.go b/api/v1/trace/data.go new file mode 100644 index 000000000..bf99eb785 --- /dev/null +++ b/api/v1/trace/data.go @@ -0,0 +1,37 @@ 
+package trace + +import "strconv" + +func string2Float(num string) float64 { + r, err := strconv.ParseFloat(num, 64) + if err != nil { + return 0.0 + } + + return r +} + +func isMapKeysEmpty(m map[string]any, k []string) bool { + for _, v := range k { + switch m[v].(type) { + case string: + if len(m[v].(string)) == 0 { + return false + } + default: + continue + } + } + + return true +} + +func isMapHasKeys(m map[string]any, k []string) bool { + for _, v := range k { + if _, ok := m[v]; !ok { + return false + } + } + + return true +} diff --git a/app/v1/trace/hko.go b/api/v1/trace/hko.go similarity index 85% rename from app/v1/trace/hko.go rename to api/v1/trace/hko.go index 9824b1a1d..ef0b6e109 100644 --- a/app/v1/trace/hko.go +++ b/api/v1/trace/hko.go @@ -5,13 +5,12 @@ import ( "strings" "time" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" "github.com/sbabiv/xml2map" ) type HKO struct { - DataSourceCache + dataSourceCache } func (h *HKO) Property() string { @@ -19,7 +18,7 @@ func (h *HKO) Property() string { } func (h *HKO) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), h.Time) <= EXPIRATION { + if time.Since(h.Time) <= EXPIRATION { return h.Cache, nil } @@ -49,15 +48,15 @@ func (h *HKO) Parse(data []byte) (map[string]any, error) { return result, nil } -func (h *HKO) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { +func (h *HKO) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { events, ok := data["Earthquake"].(map[string]any)["EventGroup"].(map[string]any)["Event"] if !ok { return nil, fmt.Errorf("source data is not valid") } - var list []Event + var list []seismicEvent for _, v := range events.([]map[string]any) { - if !hasKey(v, []string{ + if !isMapHasKeys(v, []string{ "Verify", "HKTDate", "HKTTime", "City", "Region", "Lat", "Lon", "Mag", }) { @@ -71,7 +70,7 @@ func (h *HKO) Format(latitude, longitude float64, data map[string]any) 
([]Event, continue } - l := Event{ + l := seismicEvent{ Depth: -1, Verfied: v["Verify"].(string) == "Y", Timestamp: ts.Add(-8 * time.Hour).UnixMilli(), @@ -82,7 +81,7 @@ func (h *HKO) Format(latitude, longitude float64, data map[string]any) ([]Event, Magnitude: string2Float(v["Mag"].(string)), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -90,7 +89,7 @@ func (h *HKO) Format(latitude, longitude float64, data map[string]any) ([]Event, return list, nil } -func (h *HKO) List(latitude, longitude float64) ([]Event, error) { +func (h *HKO) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := h.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/ingv.go b/api/v1/trace/ingv.go similarity index 90% rename from app/v1/trace/ingv.go rename to api/v1/trace/ingv.go index 5bb994b09..bf10fd742 100644 --- a/app/v1/trace/ingv.go +++ b/api/v1/trace/ingv.go @@ -6,12 +6,11 @@ import ( "strings" "time" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) type INGV struct { - DataSourceCache + dataSourceCache } func (c *INGV) Property() string { @@ -19,7 +18,7 @@ func (c *INGV) Property() string { } func (c *INGV) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), c.Time) <= EXPIRATION { + if time.Since(c.Time) <= EXPIRATION { return c.Cache, nil } @@ -74,10 +73,10 @@ func (c *INGV) Parse(data []byte) (map[string]any, error) { return result, nil } -func (c *INGV) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { - var list []Event +func (c *INGV) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { + var list []seismicEvent for _, v := range data["data"].([]any) { - l := Event{ + l := seismicEvent{ Verfied: true, Latitude: v.(map[string]any)["latitude"].(float64), Longitude: 
v.(map[string]any)["longitude"].(float64), @@ -88,7 +87,7 @@ func (c *INGV) Format(latitude, longitude float64, data map[string]any) ([]Event Magnitude: v.(map[string]any)["magnitude"].(float64), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -96,7 +95,7 @@ func (c *INGV) Format(latitude, longitude float64, data map[string]any) ([]Event return list, nil } -func (c *INGV) List(latitude, longitude float64) ([]Event, error) { +func (c *INGV) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := c.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/jma.go b/api/v1/trace/jma.go similarity index 87% rename from app/v1/trace/jma.go rename to api/v1/trace/jma.go index e1592f76d..02bf61b12 100644 --- a/app/v1/trace/jma.go +++ b/api/v1/trace/jma.go @@ -5,12 +5,11 @@ import ( "strings" "time" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) type JMA struct { - DataSourceCache + dataSourceCache } func (j *JMA) Property() string { @@ -18,7 +17,7 @@ func (j *JMA) Property() string { } func (j *JMA) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), j.Time) <= EXPIRATION { + if time.Since(j.Time) <= EXPIRATION { return j.Cache, nil } @@ -51,12 +50,12 @@ func (j *JMA) Parse(data []byte) (map[string]any, error) { return result, nil } -func (j *JMA) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { +func (j *JMA) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { keys := []string{"anm", "mag", "cod", "at"} - var list []Event + var list []seismicEvent for _, v := range data["data"].([]map[string]any) { - if !hasKey(v, keys) || !isEmpty(v, keys) { + if !isMapHasKeys(v, keys) || !isMapKeysEmpty(v, keys) { continue } @@ -65,7 +64,7 @@ func (j *JMA) Format(latitude, longitude 
float64, data map[string]any) ([]Event, continue } - l := Event{ + l := seismicEvent{ Depth: j.getDepth(v["cod"].(string)), Verfied: true, Timestamp: ts.Add(-9 * time.Hour).UnixMilli(), @@ -76,7 +75,7 @@ func (j *JMA) Format(latitude, longitude float64, data map[string]any) ([]Event, Magnitude: string2Float(v["mag"].(string)), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -84,7 +83,7 @@ func (j *JMA) Format(latitude, longitude float64, data map[string]any) ([]Event, return list, nil } -func (j *JMA) List(latitude, longitude float64) ([]Event, error) { +func (j *JMA) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := j.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/kma.go b/api/v1/trace/kma.go similarity index 92% rename from app/v1/trace/kma.go rename to api/v1/trace/kma.go index 2b3622386..fbe0dfbb9 100644 --- a/app/v1/trace/kma.go +++ b/api/v1/trace/kma.go @@ -7,12 +7,11 @@ import ( "time" "github.com/PuerkitoBio/goquery" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) type KMA struct { - DataSourceCache + dataSourceCache } func (k *KMA) Property() string { @@ -20,7 +19,7 @@ func (k *KMA) Property() string { } func (k *KMA) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), k.Time) <= EXPIRATION { + if time.Since(k.Time) <= EXPIRATION { return k.Cache, nil } @@ -79,10 +78,10 @@ func (k *KMA) Parse(data []byte) (map[string]any, error) { return result, nil } -func (k *KMA) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { - var list []Event +func (k *KMA) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { + var list []seismicEvent for _, v := range data["data"].([]any) { - l := Event{ + l := seismicEvent{ Verfied: true, Latitude: 
v.(map[string]any)["latitude"].(float64), Longitude: v.(map[string]any)["longitude"].(float64), @@ -93,7 +92,7 @@ func (k *KMA) Format(latitude, longitude float64, data map[string]any) ([]Event, Magnitude: v.(map[string]any)["magnitude"].(float64), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -101,7 +100,7 @@ func (k *KMA) Format(latitude, longitude float64, data map[string]any) ([]Event, return list, nil } -func (k *KMA) List(latitude, longitude float64) ([]Event, error) { +func (k *KMA) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := k.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/module.go b/api/v1/trace/module.go similarity index 66% rename from app/v1/trace/module.go rename to api/v1/trace/module.go index c7cfc9108..f309c86ea 100644 --- a/app/v1/trace/module.go +++ b/api/v1/trace/module.go @@ -3,8 +3,10 @@ package trace import ( "net/http" - "github.com/anyshake/observer/app" + v1 "github.com/anyshake/observer/api/v1" + "github.com/anyshake/observer/drivers/explorer" "github.com/anyshake/observer/server/response" + "github.com/anyshake/observer/utils/logger" "github.com/gin-gonic/gin" ) @@ -16,9 +18,9 @@ import ( // @Param source formData string true "Use `show` to get available sources first, then choose one and request again to get events" // @Failure 400 {object} response.HttpResponse "Failed to read earthquake event list due to invalid data source" // @Failure 500 {object} response.HttpResponse "Failed to read earthquake event list due to failed to read data source" -// @Success 200 {object} response.HttpResponse{data=[]Event} "Successfully read the list of earthquake events" -func (t *Trace) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) { - sources := map[string]DataSource{ +// @Success 200 {object} 
response.HttpResponse{data=[]seismicEvent} "Successfully read the list of earthquake events" +func (t *Trace) Register(rg *gin.RouterGroup, resolver *v1.Resolver) error { + sources := map[string]dataSource{ "CWA": &CWA{}, "HKO": &HKO{}, "JMA": &JMA{}, @@ -31,9 +33,19 @@ func (t *Trace) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) "CEA_DASE": &CEA_DASE{}, } + var explorerDeps *explorer.ExplorerDependency + err := resolver.Dependency.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + return err + } + rg.POST("/trace", func(c *gin.Context) { - var binding Binding + var binding traceBinding if err := c.ShouldBind(&binding); err != nil { + logger.GetLogger(t.GetApiName()).Errorln(err) response.Error(c, http.StatusBadRequest) return } @@ -57,23 +69,29 @@ func (t *Trace) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) return } + if err != nil { + logger.GetLogger(t.GetApiName()).Errorln(err) + return + } var ( source, ok = sources[binding.Source] - latitude = options.FeatureOptions.Config.Station.Latitude - longitude = options.FeatureOptions.Config.Station.Longitude + latitude = explorerDeps.Config.Latitude + longitude = explorerDeps.Config.Longitude ) if ok { events, err := source.List(latitude, longitude) if err != nil { + logger.GetLogger(t.GetApiName()).Errorln(err) response.Error(c, http.StatusInternalServerError) return } - sortByTimestamp(events) - response.Message(c, "Successfully read the list of earthquake events", events) + response.Message(c, "Successfully read the list of earthquake events", sortSeismicEvents(events)) return } response.Error(c, http.StatusBadRequest) }) + + return nil } diff --git a/api/v1/trace/name.go b/api/v1/trace/name.go new file mode 100644 index 000000000..f9b7946ca --- /dev/null +++ b/api/v1/trace/name.go @@ -0,0 +1,5 @@ +package trace + +func (t *Trace) GetApiName() string { + return "trace" +} diff --git a/app/v1/trace/scea-b.go 
b/api/v1/trace/scea-b.go similarity index 82% rename from app/v1/trace/scea-b.go rename to api/v1/trace/scea-b.go index 5bc99baba..2f1dd79b6 100644 --- a/app/v1/trace/scea-b.go +++ b/api/v1/trace/scea-b.go @@ -5,12 +5,11 @@ import ( "fmt" "time" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) type SCEA_B struct { - DataSourceCache + dataSourceCache } func (s *SCEA_B) Property() string { @@ -18,7 +17,7 @@ func (s *SCEA_B) Property() string { } func (s *SCEA_B) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), s.Time) <= EXPIRATION { + if time.Since(s.Time) <= EXPIRATION { return s.Cache, nil } @@ -50,16 +49,16 @@ func (s *SCEA_B) Parse(data []byte) (map[string]any, error) { return result, nil } -func (s *SCEA_B) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { +func (s *SCEA_B) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { keys := []string{"eventId", "shockTime", "longitude", "latitude", "placeName", "magnitude", "depth"} - var list []Event + var list []seismicEvent for _, v := range data["data"].([]any) { - if !hasKey(v.(map[string]any), keys) || !isEmpty(v.(map[string]any), keys) { + if !isMapHasKeys(v.(map[string]any), keys) || !isMapKeysEmpty(v.(map[string]any), keys) { continue } - l := Event{ + l := seismicEvent{ Verfied: true, Depth: v.(map[string]any)["depth"].(float64), Event: v.(map[string]any)["eventId"].(string), @@ -70,7 +69,7 @@ func (s *SCEA_B) Format(latitude, longitude float64, data map[string]any) ([]Eve Timestamp: time.UnixMilli(int64(v.(map[string]any)["shockTime"].(float64))).UnixMilli(), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -78,7 +77,7 @@ func (s *SCEA_B) Format(latitude, longitude float64, data map[string]any) ([]Eve return list, nil } -func (s 
*SCEA_B) List(latitude, longitude float64) ([]Event, error) { +func (s *SCEA_B) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := s.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/scea-e.go b/api/v1/trace/scea-e.go similarity index 80% rename from app/v1/trace/scea-e.go rename to api/v1/trace/scea-e.go index bcd04908a..824b394cc 100644 --- a/app/v1/trace/scea-e.go +++ b/api/v1/trace/scea-e.go @@ -3,13 +3,12 @@ package trace import ( "time" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) type SCEA_E struct { SCEA_B - DataSourceCache + dataSourceCache } func (s *SCEA_E) Property() string { @@ -17,7 +16,7 @@ func (s *SCEA_E) Property() string { } func (s *SCEA_E) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), s.Time) <= EXPIRATION { + if time.Since(s.Time) <= EXPIRATION { return s.Cache, nil } @@ -36,16 +35,16 @@ func (s *SCEA_E) Fetch() ([]byte, error) { return res, nil } -func (s *SCEA_E) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { +func (s *SCEA_E) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { keys := []string{"eventId", "shockTime", "longitude", "latitude", "placeName", "magnitude", "depth"} - var list []Event + var list []seismicEvent for _, v := range data["data"].([]any) { - if !hasKey(v.(map[string]any), keys) || !isEmpty(v.(map[string]any), keys) { + if !isMapHasKeys(v.(map[string]any), keys) || !isMapKeysEmpty(v.(map[string]any), keys) { continue } - l := Event{ + l := seismicEvent{ Verfied: true, Depth: -1, Event: v.(map[string]any)["eventId"].(string), @@ -56,7 +55,7 @@ func (s *SCEA_E) Format(latitude, longitude float64, data map[string]any) ([]Eve Timestamp: time.UnixMilli(int64(v.(map[string]any)["shockTime"].(float64))).UnixMilli(), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) - l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = 
getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -64,7 +63,7 @@ func (s *SCEA_E) Format(latitude, longitude float64, data map[string]any) ([]Eve return list, nil } -func (s *SCEA_E) List(latitude, longitude float64) ([]Event, error) { +func (s *SCEA_E) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := s.Fetch() if err != nil { return nil, err diff --git a/api/v1/trace/sort.go b/api/v1/trace/sort.go new file mode 100644 index 000000000..15c5a94c3 --- /dev/null +++ b/api/v1/trace/sort.go @@ -0,0 +1 @@ +package trace diff --git a/api/v1/trace/types.go b/api/v1/trace/types.go new file mode 100644 index 000000000..4c4be15c9 --- /dev/null +++ b/api/v1/trace/types.go @@ -0,0 +1,42 @@ +package trace + +import "time" + +const EXPIRATION = time.Minute // Cache expiration duration for calling external API response + +type Trace struct{} + +type traceBinding struct { + Source string `form:"source" json:"source" xml:"source" binding:"required"` +} + +type seismicEventEstimation struct { + P float64 `json:"p"` + S float64 `json:"s"` +} + +type seismicEvent struct { + Verfied bool `json:"verfied"` + Timestamp int64 `json:"timestamp"` + Event string `json:"event"` + Region string `json:"region"` + Depth float64 `json:"depth"` + Latitude float64 `json:"latitude"` + Longitude float64 `json:"longitude"` + Distance float64 `json:"distance"` + Magnitude float64 `json:"magnitude"` + Estimation seismicEventEstimation `json:"estimation"` +} + +type dataSource interface { + Property() string + Fetch() ([]byte, error) + Parse([]byte) (map[string]any, error) + List(latitude, longitude float64) ([]seismicEvent, error) + Format(float64, float64, map[string]any) ([]seismicEvent, error) +} + +type dataSourceCache struct { + Time time.Time + Cache []byte +} diff --git a/app/v1/trace/usgs.go b/api/v1/trace/usgs.go similarity index 81% rename from app/v1/trace/usgs.go rename to api/v1/trace/usgs.go index 77eb07f53..885d8b410 100644 --- 
a/app/v1/trace/usgs.go +++ b/api/v1/trace/usgs.go @@ -5,12 +5,11 @@ import ( "fmt" "time" - "github.com/anyshake/observer/utils/duration" "github.com/anyshake/observer/utils/request" ) type USGS struct { - DataSourceCache + dataSourceCache } func (u *USGS) Property() string { @@ -18,7 +17,7 @@ func (u *USGS) Property() string { } func (u *USGS) Fetch() ([]byte, error) { - if duration.Difference(time.Now(), u.Time) <= EXPIRATION { + if time.Since(u.Time) <= EXPIRATION { return u.Cache, nil } @@ -47,27 +46,27 @@ func (u *USGS) Parse(data []byte) (map[string]any, error) { return result, nil } -func (u *USGS) Format(latitude, longitude float64, data map[string]any) ([]Event, error) { +func (u *USGS) Format(latitude, longitude float64, data map[string]any) ([]seismicEvent, error) { events, ok := data["features"] if !ok { return nil, fmt.Errorf("source data is not valid") } - var list []Event + var list []seismicEvent for _, v := range events.([]any) { - if !hasKey(v.(map[string]any), []string{"properties"}) { + if !isMapHasKeys(v.(map[string]any), []string{"properties"}) { continue } properties := v.(map[string]any)["properties"] - if !hasKey(properties.(map[string]any), []string{ + if !isMapHasKeys(properties.(map[string]any), []string{ "mag", "place", "time", "type", "title", }) { continue } geometry := v.(map[string]any)["geometry"] - if !hasKey(geometry.(map[string]any), []string{"coordinates"}) { + if !isMapHasKeys(geometry.(map[string]any), []string{"coordinates"}) { continue } @@ -80,7 +79,7 @@ func (u *USGS) Format(latitude, longitude float64, data map[string]any) ([]Event continue } - l := Event{ + l := seismicEvent{ Depth: coordinates.([]any)[2].(float64), Verfied: true, Timestamp: int64(properties.(map[string]any)["time"].(float64)), @@ -91,7 +90,7 @@ func (u *USGS) Format(latitude, longitude float64, data map[string]any) ([]Event Magnitude: properties.(map[string]any)["mag"].(float64), } l.Distance = getDistance(latitude, l.Latitude, longitude, l.Longitude) 
- l.Estimation = getEstimation(l.Depth, l.Distance) + l.Estimation = getSeismicEstimation(l.Depth, l.Distance) list = append(list, l) } @@ -99,7 +98,7 @@ func (u *USGS) Format(latitude, longitude float64, data map[string]any) ([]Event return list, nil } -func (u *USGS) List(latitude, longitude float64) ([]Event, error) { +func (u *USGS) List(latitude, longitude float64) ([]seismicEvent, error) { res, err := u.Fetch() if err != nil { return nil, err diff --git a/app/v1/trace/estimate.go b/api/v1/trace/utils.go similarity index 88% rename from app/v1/trace/estimate.go rename to api/v1/trace/utils.go index 3ceede961..b3e4359b2 100644 --- a/app/v1/trace/estimate.go +++ b/api/v1/trace/utils.go @@ -1,8 +1,38 @@ package trace -import "math" +import ( + "math" + "sort" +) -func getEstimation(depth, distance float64) estimation { +func sortSeismicEvents(events []seismicEvent) []seismicEvent { + sort.Slice(events, func(i, j int) bool { + return events[i].Timestamp > events[j].Timestamp + }) + + return events +} + +func getDistance(lat1, lat2, lng1, lng2 float64) float64 { + var ( + radius = 6378.137 + rad = math.Pi / 180.0 + ) + lat1 = lat1 * rad + lng1 = lng1 * rad + lat2 = lat2 * rad + lng2 = lng2 * rad + + var ( + a = lat1 - lat2 + b = lng1 - lng2 + cal = 2 * math.Asin(math.Sqrt(math.Pow(math.Sin(a/2), 2)+math.Cos(lat1)*math.Cos(lat2)*math.Pow(math.Sin(b/2), 2))) * radius + result = math.Round(cal*10000) / 10000 + ) + return result +} + +func getSeismicEstimation(depth, distance float64) seismicEventEstimation { var ( INTERCEPT = []float64{8.567052, 7.5333714, 6.667651, 8.562906, 7.877903, 7.191011, 6.5055184} SLOPE = []float64{0.23335281, 0.23347212, 0.23335606, 0.23335613, 0.23335539, 0.23335367, 0.23335291} @@ -96,9 +126,9 @@ func getEstimation(depth, distance float64) estimation { sWave = distance / 3.5 ) if depth == -1 { - return estimation{P: pWave, S: sWave} + return seismicEventEstimation{P: pWave, S: sWave} } else if distance <= 0 { - return estimation{P: 0, S: 
0} + return seismicEventEstimation{P: 0, S: 0} } i := (int)(depth / 5.0) @@ -114,11 +144,11 @@ func getEstimation(depth, distance float64) estimation { length = len(fArr2) - 1 ) if distance > fArr2[length] { - return estimation{P: pWave, S: (SLOPE[i] * distance) + INTERCEPT[i]} + return seismicEventEstimation{P: pWave, S: (SLOPE[i] * distance) + INTERCEPT[i]} } if math.Abs(distance-fArr2[length]) < 0.0 { - return estimation{P: pWave, S: fArr3[len(fArr3)-1]} + return seismicEventEstimation{P: pWave, S: fArr3[len(fArr3)-1]} } for i2 < length && distance >= fArr2[i2] { i2++ @@ -128,5 +158,5 @@ func getEstimation(depth, distance float64) estimation { i3 = i2 - 1 i4 = i3 + 1 ) - return estimation{P: pWave, S: fArr3[i3] + ((fArr3[i4] - fArr3[i3]) * ((distance - fArr2[i3]) / (fArr2[i4] - fArr2[i3])))} + return seismicEventEstimation{P: pWave, S: fArr3[i3] + ((fArr3[i4] - fArr3[i3]) * ((distance - fArr2[i3]) / (fArr2[i4] - fArr2[i3])))} } diff --git a/api/v1/types.go b/api/v1/types.go new file mode 100644 index 000000000..f526220c9 --- /dev/null +++ b/api/v1/types.go @@ -0,0 +1,15 @@ +package v1 + +import ( + "github.com/anyshake/observer/services" + "github.com/gin-gonic/gin" +) + +type Resolver struct { + *services.Options +} + +type Endpoint interface { + Register(*gin.RouterGroup, *Resolver) error + GetApiName() string +} diff --git a/api/v2/generated.go b/api/v2/generated.go new file mode 100644 index 000000000..a826e0a13 --- /dev/null +++ b/api/v2/generated.go @@ -0,0 +1,3117 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. 
+ +package v2 + +import ( + "bytes" + "context" + "embed" + "errors" + "fmt" + "strconv" + "sync" + "sync/atomic" + + "github.com/99designs/gqlgen/graphql" + "github.com/99designs/gqlgen/graphql/introspection" + gqlparser "github.com/vektah/gqlparser/v2" + "github.com/vektah/gqlparser/v2/ast" +) + +// region ************************** generated!.gotpl ************************** + +// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface. +func NewExecutableSchema(cfg Config) graphql.ExecutableSchema { + return &executableSchema{ + schema: cfg.Schema, + resolvers: cfg.Resolvers, + directives: cfg.Directives, + complexity: cfg.Complexity, + } +} + +type Config struct { + Schema *ast.Schema + Resolvers ResolverRoot + Directives DirectiveRoot + Complexity ComplexityRoot +} + +type ResolverRoot interface { + Query() QueryResolver +} + +type DirectiveRoot struct { +} + +type ComplexityRoot struct { + Query struct { + Test func(childComplexity int) int + } +} + +type QueryResolver interface { + Test(ctx context.Context) (*bool, error) +} + +type executableSchema struct { + schema *ast.Schema + resolvers ResolverRoot + directives DirectiveRoot + complexity ComplexityRoot +} + +func (e *executableSchema) Schema() *ast.Schema { + if e.schema != nil { + return e.schema + } + return parsedSchema +} + +func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) { + ec := executionContext{nil, e, 0, 0, nil} + _ = ec + switch typeName + "." 
+ field { + + case "Query.test": + if e.complexity.Query.Test == nil { + break + } + + return e.complexity.Query.Test(childComplexity), true + + } + return 0, false +} + +func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler { + rc := graphql.GetOperationContext(ctx) + ec := executionContext{rc, e, 0, 0, make(chan graphql.DeferredResult)} + inputUnmarshalMap := graphql.BuildUnmarshalerMap() + first := true + + switch rc.Operation.Operation { + case ast.Query: + return func(ctx context.Context) *graphql.Response { + var response graphql.Response + var data graphql.Marshaler + if first { + first = false + ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap) + data = ec._Query(ctx, rc.Operation.SelectionSet) + } else { + if atomic.LoadInt32(&ec.pendingDeferred) > 0 { + result := <-ec.deferredResults + atomic.AddInt32(&ec.pendingDeferred, -1) + data = result.Result + response.Path = result.Path + response.Label = result.Label + response.Errors = result.Errors + } else { + return nil + } + } + var buf bytes.Buffer + data.MarshalGQL(&buf) + response.Data = buf.Bytes() + if atomic.LoadInt32(&ec.deferred) > 0 { + hasNext := atomic.LoadInt32(&ec.pendingDeferred) > 0 + response.HasNext = &hasNext + } + + return &response + } + + default: + return graphql.OneShot(graphql.ErrorResponse(ctx, "unsupported GraphQL operation")) + } +} + +type executionContext struct { + *graphql.OperationContext + *executableSchema + deferred int32 + pendingDeferred int32 + deferredResults chan graphql.DeferredResult +} + +func (ec *executionContext) processDeferredGroup(dg graphql.DeferredGroup) { + atomic.AddInt32(&ec.pendingDeferred, 1) + go func() { + ctx := graphql.WithFreshResponseContext(dg.Context) + dg.FieldSet.Dispatch(ctx) + ds := graphql.DeferredResult{ + Path: dg.Path, + Label: dg.Label, + Result: dg.FieldSet, + Errors: graphql.GetErrors(ctx), + } + // null fields should bubble up + if dg.FieldSet.Invalids > 0 { + ds.Result = graphql.Null + } + 
ec.deferredResults <- ds + }() +} + +func (ec *executionContext) introspectSchema() (*introspection.Schema, error) { + if ec.DisableIntrospection { + return nil, errors.New("introspection disabled") + } + return introspection.WrapSchema(ec.Schema()), nil +} + +func (ec *executionContext) introspectType(name string) (*introspection.Type, error) { + if ec.DisableIntrospection { + return nil, errors.New("introspection disabled") + } + return introspection.WrapTypeFromDef(ec.Schema(), ec.Schema().Types[name]), nil +} + +//go:embed "schema.graphqls" +var sourcesFS embed.FS + +func sourceData(filename string) string { + data, err := sourcesFS.ReadFile(filename) + if err != nil { + panic(fmt.Sprintf("codegen problem: %s not available", filename)) + } + return string(data) +} + +var sources = []*ast.Source{ + {Name: "schema.graphqls", Input: sourceData("schema.graphqls"), BuiltIn: false}, +} +var parsedSchema = gqlparser.MustLoadSchema(sources...) + +// endregion ************************** generated!.gotpl ************************** + +// region ***************************** args.gotpl ***************************** + +func (ec *executionContext) field_Query___type_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 string + if tmp, ok := rawArgs["name"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("name")) + arg0, err = ec.unmarshalNString2string(ctx, tmp) + if err != nil { + return nil, err + } + } + args["name"] = arg0 + return args, nil +} + +func (ec *executionContext) field___Type_enumValues_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 bool + if tmp, ok := rawArgs["includeDeprecated"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp) + if err != 
nil { + return nil, err + } + } + args["includeDeprecated"] = arg0 + return args, nil +} + +func (ec *executionContext) field___Type_fields_args(ctx context.Context, rawArgs map[string]interface{}) (map[string]interface{}, error) { + var err error + args := map[string]interface{}{} + var arg0 bool + if tmp, ok := rawArgs["includeDeprecated"]; ok { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("includeDeprecated")) + arg0, err = ec.unmarshalOBoolean2bool(ctx, tmp) + if err != nil { + return nil, err + } + } + args["includeDeprecated"] = arg0 + return args, nil +} + +// endregion ***************************** args.gotpl ***************************** + +// region ************************** directives.gotpl ************************** + +// endregion ************************** directives.gotpl ************************** + +// region **************************** field.gotpl ***************************** + +func (ec *executionContext) _Query_test(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query_test(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.resolvers.Query().Test(rctx) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*bool) + fc.Result = res + return ec.marshalOBoolean2ᚖbool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query_test(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: true, + Child: func(ctx 
context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) _Query___type(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.introspectType(fc.Args["name"].(string)) + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query___type(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return 
ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field_Query___type_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } + return fc, nil +} + +func (ec *executionContext) _Query___schema(ctx context.Context, field graphql.CollectedField) (ret graphql.Marshaler) { + fc, err := ec.fieldContext_Query___schema(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return ec.introspectSchema() + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Schema) + fc.Result = res + return ec.marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext_Query___schema(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "Query", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "description": + return ec.fieldContext___Schema_description(ctx, field) + case "types": + return ec.fieldContext___Schema_types(ctx, field) + case 
"queryType": + return ec.fieldContext___Schema_queryType(ctx, field) + case "mutationType": + return ec.fieldContext___Schema_mutationType(ctx, field) + case "subscriptionType": + return ec.fieldContext___Schema_subscriptionType(ctx, field) + case "directives": + return ec.fieldContext___Schema_directives(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Schema", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_name(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext___Directive_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_description(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_locations(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_locations(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Locations, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]string) + fc.Result = res + return 
ec.marshalN__DirectiveLocation2ᚕstringᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_locations(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type __DirectiveLocation does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_args(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_args(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case 
"description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Directive_isRepeatable(ctx context.Context, field graphql.CollectedField, obj *introspection.Directive) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Directive_isRepeatable(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsRepeatable, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Directive_isRepeatable(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Directive", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___EnumValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_name(ctx, field) + if err != nil { + return 
graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___EnumValue_name(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___EnumValue_description(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) 
fieldContext___EnumValue_description(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___EnumValue_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_isDeprecated(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___EnumValue_isDeprecated(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___EnumValue_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.EnumValue) (ret 
graphql.Marshaler) { + fc, err := ec.fieldContext___EnumValue_deprecationReason(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___EnumValue_deprecationReason(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__EnumValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + 
fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_name(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_description(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_args(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext___Field_args(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Args, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return ec.marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_args(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_type(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = 
graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_type(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_isDeprecated(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := 
ec.fieldContext___Field_isDeprecated(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.IsDeprecated(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(bool) + fc.Result = res + return ec.marshalNBoolean2bool(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_isDeprecated(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type Boolean does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Field_deprecationReason(ctx context.Context, field graphql.CollectedField, obj *introspection.Field) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Field_deprecationReason(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.DeprecationReason(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return 
ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Field_deprecationReason(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Field", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_name(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalNString2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_name(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_description(ctx context.Context, field 
graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_description(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_type(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_type(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Type, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be 
null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_type(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___InputValue_defaultValue(ctx context.Context, field graphql.CollectedField, obj *introspection.InputValue) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___InputValue_defaultValue(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use 
context from middleware stack in children + return obj.DefaultValue, nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___InputValue_defaultValue(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__InputValue", + Field: field, + IsMethod: false, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_description(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_types(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_types(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Types(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_types(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return 
ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_queryType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_queryType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.QueryType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_queryType(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return 
ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_mutationType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_mutationType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.MutationType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_mutationType(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return 
ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_subscriptionType(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_subscriptionType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SubscriptionType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_subscriptionType(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + 
case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Schema_directives(ctx context.Context, field graphql.CollectedField, obj *introspection.Schema) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Schema_directives(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Directives(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.([]introspection.Directive) + fc.Result = res + return ec.marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirectiveᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Schema_directives(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + 
fc = &graphql.FieldContext{ + Object: "__Schema", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___Directive_name(ctx, field) + case "description": + return ec.fieldContext___Directive_description(ctx, field) + case "locations": + return ec.fieldContext___Directive_locations(ctx, field) + case "args": + return ec.fieldContext___Directive_args(ctx, field) + case "isRepeatable": + return ec.fieldContext___Directive_isRepeatable(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Directive", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_kind(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_kind(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Kind(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + if !graphql.HasFieldError(ctx, fc) { + ec.Errorf(ctx, "must not be null") + } + return graphql.Null + } + res := resTmp.(string) + fc.Result = res + return ec.marshalN__TypeKind2string(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_kind(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, 
errors.New("field of type __TypeKind does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_name(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_name(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Name(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_name(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_description(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_description(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Description(), nil + }) + if err != nil { + ec.Error(ctx, 
err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_description(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_fields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_fields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Fields(fc.Args["includeDeprecated"].(bool)), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Field) + fc.Result = res + return ec.marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐFieldᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_fields(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___Field_name(ctx, 
field) + case "description": + return ec.fieldContext___Field_description(ctx, field) + case "args": + return ec.fieldContext___Field_args(ctx, field) + case "type": + return ec.fieldContext___Field_type(ctx, field) + case "isDeprecated": + return ec.fieldContext___Field_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___Field_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Field", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_fields_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } + return fc, nil +} + +func (ec *executionContext) ___Type_interfaces(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_interfaces(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.Interfaces(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_interfaces(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field 
graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_possibleTypes(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_possibleTypes(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.PossibleTypes(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_possibleTypes(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = 
&graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_enumValues(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_enumValues(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.EnumValues(fc.Args["includeDeprecated"].(bool)), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.EnumValue) + fc.Result = res + return ec.marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx, 
field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_enumValues(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___EnumValue_name(ctx, field) + case "description": + return ec.fieldContext___EnumValue_description(ctx, field) + case "isDeprecated": + return ec.fieldContext___EnumValue_isDeprecated(ctx, field) + case "deprecationReason": + return ec.fieldContext___EnumValue_deprecationReason(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __EnumValue", field.Name) + }, + } + defer func() { + if r := recover(); r != nil { + err = ec.Recover(ctx, r) + ec.Error(ctx, err) + } + }() + ctx = graphql.WithFieldContext(ctx, fc) + if fc.Args, err = ec.field___Type_enumValues_args(ctx, field.ArgumentMap(ec.Variables)); err != nil { + ec.Error(ctx, err) + return fc, err + } + return fc, nil +} + +func (ec *executionContext) ___Type_inputFields(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_inputFields(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.InputFields(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.([]introspection.InputValue) + fc.Result = res + return 
ec.marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_inputFields(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "name": + return ec.fieldContext___InputValue_name(ctx, field) + case "description": + return ec.fieldContext___InputValue_description(ctx, field) + case "type": + return ec.fieldContext___InputValue_type(ctx, field) + case "defaultValue": + return ec.fieldContext___InputValue_defaultValue(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __InputValue", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_ofType(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_ofType(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.OfType(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*introspection.Type) + fc.Result = res + return ec.marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_ofType(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: 
"__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + switch field.Name { + case "kind": + return ec.fieldContext___Type_kind(ctx, field) + case "name": + return ec.fieldContext___Type_name(ctx, field) + case "description": + return ec.fieldContext___Type_description(ctx, field) + case "fields": + return ec.fieldContext___Type_fields(ctx, field) + case "interfaces": + return ec.fieldContext___Type_interfaces(ctx, field) + case "possibleTypes": + return ec.fieldContext___Type_possibleTypes(ctx, field) + case "enumValues": + return ec.fieldContext___Type_enumValues(ctx, field) + case "inputFields": + return ec.fieldContext___Type_inputFields(ctx, field) + case "ofType": + return ec.fieldContext___Type_ofType(ctx, field) + case "specifiedByURL": + return ec.fieldContext___Type_specifiedByURL(ctx, field) + } + return nil, fmt.Errorf("no field named %q was found under type __Type", field.Name) + }, + } + return fc, nil +} + +func (ec *executionContext) ___Type_specifiedByURL(ctx context.Context, field graphql.CollectedField, obj *introspection.Type) (ret graphql.Marshaler) { + fc, err := ec.fieldContext___Type_specifiedByURL(ctx, field) + if err != nil { + return graphql.Null + } + ctx = graphql.WithFieldContext(ctx, fc) + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = graphql.Null + } + }() + resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) { + ctx = rctx // use context from middleware stack in children + return obj.SpecifiedByURL(), nil + }) + if err != nil { + ec.Error(ctx, err) + return graphql.Null + } + if resTmp == nil { + return graphql.Null + } + res := resTmp.(*string) + fc.Result = res + return ec.marshalOString2ᚖstring(ctx, field.Selections, res) +} + +func (ec *executionContext) fieldContext___Type_specifiedByURL(_ context.Context, field graphql.CollectedField) (fc 
*graphql.FieldContext, err error) { + fc = &graphql.FieldContext{ + Object: "__Type", + Field: field, + IsMethod: true, + IsResolver: false, + Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { + return nil, errors.New("field of type String does not have child fields") + }, + } + return fc, nil +} + +// endregion **************************** field.gotpl ***************************** + +// region **************************** input.gotpl ***************************** + +// endregion **************************** input.gotpl ***************************** + +// region ************************** interface.gotpl *************************** + +// endregion ************************** interface.gotpl *************************** + +// region **************************** object.gotpl **************************** + +var queryImplementors = []string{"Query"} + +func (ec *executionContext) _Query(ctx context.Context, sel ast.SelectionSet) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, queryImplementors) + ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{ + Object: "Query", + }) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{ + Object: field.Name, + Field: field, + }) + + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("Query") + case "test": + field := field + + innerFunc := func(ctx context.Context, _ *graphql.FieldSet) (res graphql.Marshaler) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + } + }() + res = ec._Query_test(ctx, field) + return res + } + + rrm := func(ctx context.Context) graphql.Marshaler { + return ec.OperationContext.RootResolverMiddleware(ctx, + func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) }) + } + + out.Concurrently(i, func(ctx 
context.Context) graphql.Marshaler { return rrm(innerCtx) }) + case "__type": + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___type(ctx, field) + }) + case "__schema": + out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) { + return ec._Query___schema(ctx, field) + }) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __DirectiveImplementors = []string{"__Directive"} + +func (ec *executionContext) ___Directive(ctx context.Context, sel ast.SelectionSet, obj *introspection.Directive) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __DirectiveImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Directive") + case "name": + out.Values[i] = ec.___Directive_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___Directive_description(ctx, field, obj) + case "locations": + out.Values[i] = ec.___Directive_locations(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "args": + out.Values[i] = ec.___Directive_args(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "isRepeatable": + out.Values[i] = ec.___Directive_isRepeatable(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + default: + panic("unknown field " + 
strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __EnumValueImplementors = []string{"__EnumValue"} + +func (ec *executionContext) ___EnumValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.EnumValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __EnumValueImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__EnumValue") + case "name": + out.Values[i] = ec.___EnumValue_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___EnumValue_description(ctx, field, obj) + case "isDeprecated": + out.Values[i] = ec.___EnumValue_isDeprecated(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "deprecationReason": + out.Values[i] = ec.___EnumValue_deprecationReason(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __FieldImplementors = []string{"__Field"} + +func (ec *executionContext) ___Field(ctx context.Context, sel ast.SelectionSet, obj *introspection.Field) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __FieldImplementors) + + out := graphql.NewFieldSet(fields) 
+ deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Field") + case "name": + out.Values[i] = ec.___Field_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___Field_description(ctx, field, obj) + case "args": + out.Values[i] = ec.___Field_args(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "type": + out.Values[i] = ec.___Field_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "isDeprecated": + out.Values[i] = ec.___Field_isDeprecated(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "deprecationReason": + out.Values[i] = ec.___Field_deprecationReason(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __InputValueImplementors = []string{"__InputValue"} + +func (ec *executionContext) ___InputValue(ctx context.Context, sel ast.SelectionSet, obj *introspection.InputValue) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __InputValueImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__InputValue") + case "name": + out.Values[i] = ec.___InputValue_name(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "description": + out.Values[i] = ec.___InputValue_description(ctx, field, obj) + case "type": + 
out.Values[i] = ec.___InputValue_type(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "defaultValue": + out.Values[i] = ec.___InputValue_defaultValue(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __SchemaImplementors = []string{"__Schema"} + +func (ec *executionContext) ___Schema(ctx context.Context, sel ast.SelectionSet, obj *introspection.Schema) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __SchemaImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Schema") + case "description": + out.Values[i] = ec.___Schema_description(ctx, field, obj) + case "types": + out.Values[i] = ec.___Schema_types(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "queryType": + out.Values[i] = ec.___Schema_queryType(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "mutationType": + out.Values[i] = ec.___Schema_mutationType(ctx, field, obj) + case "subscriptionType": + out.Values[i] = ec.___Schema_subscriptionType(ctx, field, obj) + case "directives": + out.Values[i] = ec.___Schema_directives(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + 
ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +var __TypeImplementors = []string{"__Type"} + +func (ec *executionContext) ___Type(ctx context.Context, sel ast.SelectionSet, obj *introspection.Type) graphql.Marshaler { + fields := graphql.CollectFields(ec.OperationContext, sel, __TypeImplementors) + + out := graphql.NewFieldSet(fields) + deferred := make(map[string]*graphql.FieldSet) + for i, field := range fields { + switch field.Name { + case "__typename": + out.Values[i] = graphql.MarshalString("__Type") + case "kind": + out.Values[i] = ec.___Type_kind(ctx, field, obj) + if out.Values[i] == graphql.Null { + out.Invalids++ + } + case "name": + out.Values[i] = ec.___Type_name(ctx, field, obj) + case "description": + out.Values[i] = ec.___Type_description(ctx, field, obj) + case "fields": + out.Values[i] = ec.___Type_fields(ctx, field, obj) + case "interfaces": + out.Values[i] = ec.___Type_interfaces(ctx, field, obj) + case "possibleTypes": + out.Values[i] = ec.___Type_possibleTypes(ctx, field, obj) + case "enumValues": + out.Values[i] = ec.___Type_enumValues(ctx, field, obj) + case "inputFields": + out.Values[i] = ec.___Type_inputFields(ctx, field, obj) + case "ofType": + out.Values[i] = ec.___Type_ofType(ctx, field, obj) + case "specifiedByURL": + out.Values[i] = ec.___Type_specifiedByURL(ctx, field, obj) + default: + panic("unknown field " + strconv.Quote(field.Name)) + } + } + out.Dispatch(ctx) + if out.Invalids > 0 { + return graphql.Null + } + + atomic.AddInt32(&ec.deferred, int32(len(deferred))) + + for label, dfs := range deferred { + ec.processDeferredGroup(graphql.DeferredGroup{ + Label: label, + Path: graphql.GetPath(ctx), + FieldSet: dfs, + Context: ctx, + }) + } + + return out +} + +// endregion **************************** object.gotpl **************************** + +// region ***************************** type.gotpl 
***************************** + +func (ec *executionContext) unmarshalNBoolean2bool(ctx context.Context, v interface{}) (bool, error) { + res, err := graphql.UnmarshalBoolean(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + res := graphql.MarshalBoolean(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalNString2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalNString2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx context.Context, sel ast.SelectionSet, v introspection.Directive) graphql.Marshaler { + return ec.___Directive(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Directive2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirectiveᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Directive) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer 
wg.Done() + } + ret[i] = ec.marshalN__Directive2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐDirective(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN__DirectiveLocation2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalN__DirectiveLocation2ᚕstringᚄ(ctx context.Context, v interface{}) ([]string, error) { + var vSlice []interface{} + if v != nil { + vSlice = graphql.CoerceList(v) + } + var err error + res := make([]string, len(vSlice)) + for i := range vSlice { + ctx := graphql.WithPathContext(ctx, graphql.NewPathWithIndex(i)) + res[i], err = ec.unmarshalN__DirectiveLocation2string(ctx, vSlice[i]) + if err != nil { + return nil, err + } + } + return res, nil +} + +func (ec *executionContext) marshalN__DirectiveLocation2ᚕstringᚄ(ctx context.Context, sel ast.SelectionSet, v []string) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__DirectiveLocation2string(ctx, sel, v[i]) + } 
+ if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx context.Context, sel ast.SelectionSet, v introspection.EnumValue) graphql.Marshaler { + return ec.___EnumValue(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx context.Context, sel ast.SelectionSet, v introspection.Field) graphql.Marshaler { + return ec.___Field(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx context.Context, sel ast.SelectionSet, v introspection.InputValue) graphql.Marshaler { + return ec.___InputValue(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v introspection.Type) graphql.Marshaler { + return 
ec.___Type(ctx, sel, &v) +} + +func (ec *executionContext) marshalN__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalN__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +func (ec *executionContext) unmarshalN__TypeKind2string(ctx context.Context, v interface{}) (string, error) { + res, err := graphql.UnmarshalString(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalN__TypeKind2string(ctx context.Context, sel ast.SelectionSet, v string) graphql.Marshaler { + res := graphql.MarshalString(v) + if res == graphql.Null { + if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) { + ec.Errorf(ctx, "the requested element is null which the schema does not allow") + } + } + return res +} + +func (ec *executionContext) unmarshalOBoolean2bool(ctx context.Context, v interface{}) (bool, error) { + 
res, err := graphql.UnmarshalBoolean(v) + return res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOBoolean2bool(ctx context.Context, sel ast.SelectionSet, v bool) graphql.Marshaler { + res := graphql.MarshalBoolean(v) + return res +} + +func (ec *executionContext) unmarshalOBoolean2ᚖbool(ctx context.Context, v interface{}) (*bool, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalBoolean(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOBoolean2ᚖbool(ctx context.Context, sel ast.SelectionSet, v *bool) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalBoolean(*v) + return res +} + +func (ec *executionContext) unmarshalOString2ᚖstring(ctx context.Context, v interface{}) (*string, error) { + if v == nil { + return nil, nil + } + res, err := graphql.UnmarshalString(v) + return &res, graphql.ErrorOnPath(ctx, err) +} + +func (ec *executionContext) marshalOString2ᚖstring(ctx context.Context, sel ast.SelectionSet, v *string) graphql.Marshaler { + if v == nil { + return graphql.Null + } + res := graphql.MarshalString(*v) + return res +} + +func (ec *executionContext) marshalO__EnumValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.EnumValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__EnumValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐEnumValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go 
f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__Field2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐFieldᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Field) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Field2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐField(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__InputValue2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValueᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.InputValue) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__InputValue2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐInputValue(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + 
return ret +} + +func (ec *executionContext) marshalO__Schema2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐSchema(ctx context.Context, sel ast.SelectionSet, v *introspection.Schema) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.___Schema(ctx, sel, v) +} + +func (ec *executionContext) marshalO__Type2ᚕgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐTypeᚄ(ctx context.Context, sel ast.SelectionSet, v []introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + ret := make(graphql.Array, len(v)) + var wg sync.WaitGroup + isLen1 := len(v) == 1 + if !isLen1 { + wg.Add(len(v)) + } + for i := range v { + i := i + fc := &graphql.FieldContext{ + Index: &i, + Result: &v[i], + } + ctx := graphql.WithFieldContext(ctx, fc) + f := func(i int) { + defer func() { + if r := recover(); r != nil { + ec.Error(ctx, ec.Recover(ctx, r)) + ret = nil + } + }() + if !isLen1 { + defer wg.Done() + } + ret[i] = ec.marshalN__Type2githubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx, sel, v[i]) + } + if isLen1 { + f(i) + } else { + go f(i) + } + + } + wg.Wait() + + for _, e := range ret { + if e == graphql.Null { + return graphql.Null + } + } + + return ret +} + +func (ec *executionContext) marshalO__Type2ᚖgithubᚗcomᚋ99designsᚋgqlgenᚋgraphqlᚋintrospectionᚐType(ctx context.Context, sel ast.SelectionSet, v *introspection.Type) graphql.Marshaler { + if v == nil { + return graphql.Null + } + return ec.___Type(ctx, sel, v) +} + +// endregion ***************************** type.gotpl ***************************** diff --git a/api/v2/model/models_gen.go b/api/v2/model/models_gen.go new file mode 100644 index 000000000..9942c2794 --- /dev/null +++ b/api/v2/model/models_gen.go @@ -0,0 +1,6 @@ +// Code generated by github.com/99designs/gqlgen, DO NOT EDIT. 
+ +package model + +type Query struct { +} diff --git a/api/v2/resolver.go b/api/v2/resolver.go new file mode 100644 index 000000000..6b3ac9cc2 --- /dev/null +++ b/api/v2/resolver.go @@ -0,0 +1,11 @@ +package v2 + +import "github.com/anyshake/observer/services" + +// This file will not be regenerated automatically. +// +// It serves as dependency injection for your app, add any dependencies you require here. + +type Resolver struct { + *services.Options +} diff --git a/api/v2/schema.graphqls b/api/v2/schema.graphqls new file mode 100644 index 000000000..6f7613900 --- /dev/null +++ b/api/v2/schema.graphqls @@ -0,0 +1,5 @@ +scalar Int64 + +type Query { + test: Boolean +} diff --git a/api/v2/schema.resolvers.go b/api/v2/schema.resolvers.go new file mode 100644 index 000000000..e4ec4419b --- /dev/null +++ b/api/v2/schema.resolvers.go @@ -0,0 +1,20 @@ +package v2 + +// This file will be automatically regenerated based on the schema, any resolver implementations +// will be copied through when generating and any unknown code will be moved to the end. +// Code generated by github.com/99designs/gqlgen version v0.17.49 + +import ( + "context" + "fmt" +) + +// Test is the resolver for the test field. +func (r *queryResolver) Test(ctx context.Context) (*bool, error) { + panic(fmt.Errorf("not implemented: Test - test")) +} + +// Query returns QueryResolver implementation. 
+func (r *Resolver) Query() QueryResolver { return &queryResolver{r} } + +type queryResolver struct{ *Resolver } diff --git a/app/types.go b/app/types.go deleted file mode 100644 index 992a18395..000000000 --- a/app/types.go +++ /dev/null @@ -1,12 +0,0 @@ -package app - -import "github.com/anyshake/observer/feature" - -type ServerOptions struct { - Gzip int - CORS bool - WebPrefix string - APIPrefix string - RateFactor int - FeatureOptions *feature.FeatureOptions -} diff --git a/app/v1/history/filter.go b/app/v1/history/filter.go deleted file mode 100644 index 078460f57..000000000 --- a/app/v1/history/filter.go +++ /dev/null @@ -1,31 +0,0 @@ -package history - -import ( - "fmt" - "time" - - "github.com/anyshake/observer/app" - "github.com/anyshake/observer/driver/dao" - "github.com/anyshake/observer/publisher" -) - -func filterHistory(start, end int64, limit time.Duration, options *app.ServerOptions) ([]publisher.Geophone, error) { - pdb := options.FeatureOptions.Database - if pdb == nil { - return nil, fmt.Errorf("databse is not connected") - } - - if end-start > limit.Milliseconds() { - return nil, fmt.Errorf("duration is too large") - } - - data, err := dao.Query(pdb, start, end) - if err != nil { - return nil, err - } - - if len(data) == 0 { - return nil, fmt.Errorf("no data found") - } - return data, nil -} diff --git a/app/v1/history/info.go b/app/v1/history/info.go deleted file mode 100644 index f379084e4..000000000 --- a/app/v1/history/info.go +++ /dev/null @@ -1,23 +0,0 @@ -package history - -import ( - "strings" - - "github.com/anyshake/observer/config" - "github.com/anyshake/observer/utils/text" -) - -func getNetwork(conf *config.Conf) string { - network := strings.ToUpper(conf.Station.Network) - return text.TruncateString(network, 2) -} - -func getStation(conf *config.Conf) string { - station := strings.ToUpper(conf.Station.Station) - return text.TruncateString(station, 5) -} - -func getLocation(conf *config.Conf) string { - location := 
strings.ToUpper(conf.Station.Location) - return text.TruncateString(location, 2) -} diff --git a/app/v1/history/module.go b/app/v1/history/module.go deleted file mode 100644 index 8d9f3c462..000000000 --- a/app/v1/history/module.go +++ /dev/null @@ -1,60 +0,0 @@ -package history - -import ( - "net/http" - - "github.com/anyshake/observer/app" - "github.com/anyshake/observer/server/response" - "github.com/gin-gonic/gin" -) - -// @Summary AnyShake Observer waveform history -// @Description Get waveform count data in specified time range, channel and format, the maximum duration of the waveform data to be exported is 24 hours for JSON and 1 hour for SAC -// @Router /history [post] -// @Accept application/x-www-form-urlencoded -// @Produce application/json -// @Produce application/octet-stream -// @Param start formData int true "Start timestamp of the waveform data to be queried, in milliseconds" -// @Param end formData int true "End timestamp of the waveform data to be queried, in milliseconds" -// @Param format formData string true "Format of the waveform data to be queried, `json` or `sac`" -// @Param channel formData string false "Channel of the waveform, `EHZ`, `EHE` or `EHN`, reuqired when format is `sac`" -// @Failure 400 {object} response.HttpResponse "Failed to export waveform data due to invalid format or channel" -// @Failure 410 {object} response.HttpResponse "Failed to export waveform data due to no data available" -// @Failure 500 {object} response.HttpResponse "Failed to export waveform data due to failed to read data source" -// @Success 200 {object} response.HttpResponse{data=[]publisher.Geophone} "Successfully exported the waveform data" -func (h *History) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) { - rg.POST("/history", func(c *gin.Context) { - var binding Binding - if err := c.ShouldBind(&binding); err != nil { - response.Error(c, http.StatusBadRequest) - return - } - - switch binding.Format { - case "json": - data, err := 
filterHistory(binding.Start, binding.End, JSON_DURATION, options) - if err != nil { - response.Error(c, http.StatusGone) - return - } - response.Message(c, "The waveform data was successfully filtered", data) - return - case "sac": - data, err := filterHistory(binding.Start, binding.End, SAC_DURATION, options) - if err != nil { - response.Error(c, http.StatusGone) - return - } - fileName, dataBytes, err := getSACBytes(data, binding.Channel, options) - if err != nil { - response.Error(c, http.StatusInternalServerError) - return - } - - response.File(c, fileName, dataBytes) - return - } - - response.Error(c, http.StatusBadRequest) - }) -} diff --git a/app/v1/history/sac.go b/app/v1/history/sac.go deleted file mode 100644 index 1d31fe71f..000000000 --- a/app/v1/history/sac.go +++ /dev/null @@ -1,90 +0,0 @@ -package history - -import ( - "fmt" - "sort" - "time" - - "github.com/anyshake/observer/app" - "github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/utils/duration" - "github.com/bclswl0827/sacio" -) - -func getSACBytes(data []publisher.Geophone, channel string, options *app.ServerOptions) (string, []byte, error) { - // Check channel - if channel != "EHZ" && channel != "EHE" && channel != "EHN" { - err := fmt.Errorf("no channel selected") - return "", nil, err - } - - // Re-sort Geophone struct array - sort.Slice(data, func(i, j int) bool { - return data[i].TS < data[j].TS - }) - - // Get basic info - var ( - startTime = time.UnixMilli(data[0].TS).UTC() - endTime = time.UnixMilli(data[len(data)-1].TS).UTC() - station = getStation(options.FeatureOptions.Config) - network = getNetwork(options.FeatureOptions.Config) - location = getLocation(options.FeatureOptions.Config) - ) - - // Get sample rate - sampleRate, err := getSampleRate(data, channel) - if err != nil { - return "", nil, err - } - - // Create channel data buffer - var channelBuffer []int32 - for _, v := range data { - switch channel { - case "EHZ": - channelBuffer = append(channelBuffer, 
v.EHZ...) - case "EHE": - channelBuffer = append(channelBuffer, v.EHE...) - case "EHN": - channelBuffer = append(channelBuffer, v.EHN...) - } - } - - // Init SAC library - var sac sacio.SACData - sac.Init() - - // Set SAC header and body - sac.SetTime(startTime, duration.Difference(startTime, endTime)) - sac.SetInfo(network, station, location, channel) - sac.SetBody(int32ToFloat32(channelBuffer), sampleRate) - - // Get SAC file bytes - sacBytes, err := sac.Encode(sacio.MSBFIRST) - if err != nil { - return "", nil, err - } - - // Return filename and bytes (e.g. 2023.193.14.22.51.0317.AS.SHAKE.00.EHZ.D.sac) - filename := fmt.Sprintf("%s.%s.%s.%s.%s.%04d.%s.%s.%s.%s.D.sac", - startTime.Format("2006"), - startTime.Format("002"), - startTime.Format("15"), - startTime.Format("04"), - startTime.Format("05"), - // Get the current millisecond - startTime.Nanosecond()/1000000, - station, network, - location, channel, - ) - return filename, sacBytes, nil -} - -func int32ToFloat32(arr []int32) []float32 { - floatSlice := make([]float32, len(arr)) - for i, num := range arr { - floatSlice[i] = float32(num) - } - return floatSlice -} diff --git a/app/v1/history/sample.go b/app/v1/history/sample.go deleted file mode 100644 index abafdff14..000000000 --- a/app/v1/history/sample.go +++ /dev/null @@ -1,40 +0,0 @@ -package history - -import ( - "fmt" - "time" - - "github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/utils/duration" -) - -func getSampleRate(data []publisher.Geophone, channel string) (int, error) { - var ( - sampleRateSum float64 - lastTime = time.UnixMilli(data[0].TS) - ) - - for i := 1; i < len(data); i++ { - var ( - currentTime = time.UnixMilli(data[i].TS) - timeDiff = duration.Difference(currentTime, lastTime) - ) - if timeDiff > THRESHOLD { - err := fmt.Errorf("uneven gaps between the data") - return 0, err - } - - switch channel { - case "EHZ": - sampleRateSum += float64(len(data[i].EHZ)) / timeDiff.Seconds() - case "EHE": - sampleRateSum += 
float64(len(data[i].EHE)) / timeDiff.Seconds() - case "EHN": - sampleRateSum += float64(len(data[i].EHN)) / timeDiff.Seconds() - } - - lastTime = currentTime - } - - return int(sampleRateSum) / len(data), nil -} diff --git a/app/v1/history/types.go b/app/v1/history/types.go deleted file mode 100644 index 39afa6f4f..000000000 --- a/app/v1/history/types.go +++ /dev/null @@ -1,18 +0,0 @@ -package history - -import "time" - -const ( - JSON_DURATION = time.Hour // The maximum duration of the JSON data to be exported - SAC_DURATION = time.Hour // The maximum duration of the SAC data to be exported - THRESHOLD = time.Minute // There are uneven gaps between the data if time difference is greater than THRESHOLD -) - -type History struct{} - -type Binding struct { - Start int64 `form:"start" json:"start" xml:"start" binding:"required,numeric"` - End int64 `form:"end" json:"end" xml:"end" binding:"required,numeric"` - Format string `form:"format" json:"format" xml:"format" binding:"required,oneof=json sac"` - Channel string `form:"channel" json:"channel" xml:"channel" binding:"omitempty,oneof=EHZ EHE EHN"` -} diff --git a/app/v1/socket/message.go b/app/v1/socket/message.go deleted file mode 100644 index bd67d6dfc..000000000 --- a/app/v1/socket/message.go +++ /dev/null @@ -1,24 +0,0 @@ -package socket - -import ( - "encoding/json" - - "github.com/anyshake/observer/publisher" - "github.com/gorilla/websocket" -) - -func (s *Socket) handleMessage(gp *publisher.Geophone, conn *websocket.Conn) error { - data, err := json.Marshal(gp) - if err != nil { - conn.Close() - return err - } - - err = conn.WriteMessage(websocket.TextMessage, data) - if err != nil { - conn.Close() - return err - } - - return nil -} diff --git a/app/v1/socket/module.go b/app/v1/socket/module.go deleted file mode 100644 index 974411b09..000000000 --- a/app/v1/socket/module.go +++ /dev/null @@ -1,56 +0,0 @@ -package socket - -import ( - "net/http" - - "github.com/anyshake/observer/app" - 
"github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/server/response" - "github.com/gin-gonic/gin" - "github.com/gorilla/websocket" -) - -func (s *Socket) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) { - rg.GET("/socket", func(c *gin.Context) { - var upgrader = websocket.Upgrader{ - ReadBufferSize: 1024, WriteBufferSize: 1024, EnableCompression: true, - Error: func(w http.ResponseWriter, r *http.Request, status int, reason error) { - response.Error(c, http.StatusBadRequest) - }, - CheckOrigin: func(r *http.Request) bool { - return true - }, - } - - // Upgrade connection to WebSocket - conn, err := upgrader.Upgrade(c.Writer, c.Request, nil) - if err != nil { - return - } - - // Flag to indicate (un)subscribe - expressionForSubscribe := true - - // Properly close connection - go func(exp *bool) { - for { - _, _, err := conn.ReadMessage() - if err != nil { - break - } - } - - *exp = false - conn.Close() - }(&expressionForSubscribe) - - // Write when new message arrived - publisher.Subscribe( - &options.FeatureOptions.Status.Geophone, - &expressionForSubscribe, - func(gp *publisher.Geophone) error { - return s.handleMessage(gp, conn) - }, - ) - }) -} diff --git a/app/v1/socket/types.go b/app/v1/socket/types.go deleted file mode 100644 index 93331feea..000000000 --- a/app/v1/socket/types.go +++ /dev/null @@ -1,3 +0,0 @@ -package socket - -type Socket struct{} diff --git a/app/v1/station/adc.go b/app/v1/station/adc.go deleted file mode 100644 index a9924b7a3..000000000 --- a/app/v1/station/adc.go +++ /dev/null @@ -1,10 +0,0 @@ -package station - -import "github.com/anyshake/observer/config" - -func getADC(conf *config.Conf) adcModel { - return adcModel{ - Resolution: conf.ADC.Resolution, - FullScale: conf.ADC.FullScale, - } -} diff --git a/app/v1/station/disk.go b/app/v1/station/disk.go deleted file mode 100644 index 863b19de5..000000000 --- a/app/v1/station/disk.go +++ /dev/null @@ -1,26 +0,0 @@ -package station - -import ( - "os" 
- - "github.com/shirou/gopsutil/disk" -) - -func getDisk() diskModel { - cwd, err := os.Getwd() - if err != nil { - panic(err) - } - - usage, err := disk.Usage(cwd) - if err != nil { - panic(err) - } - - return diskModel{ - Total: usage.Total, - Free: usage.Free, - Used: usage.Used, - Percent: usage.UsedPercent, - } -} diff --git a/app/v1/station/geophone.go b/app/v1/station/geophone.go deleted file mode 100644 index af087b516..000000000 --- a/app/v1/station/geophone.go +++ /dev/null @@ -1,10 +0,0 @@ -package station - -import "github.com/anyshake/observer/config" - -func getGeophone(conf *config.Conf) geophoneModel { - return geophoneModel{ - Sensitivity: conf.Geophone.Sensitivity, - Frequency: conf.Geophone.Frequency, - } -} diff --git a/app/v1/station/memory.go b/app/v1/station/memory.go deleted file mode 100644 index 98483797b..000000000 --- a/app/v1/station/memory.go +++ /dev/null @@ -1,14 +0,0 @@ -package station - -import "github.com/shirou/gopsutil/mem" - -func getMemory() memoryModel { - vmStat, _ := mem.VirtualMemory() - - return memoryModel{ - Total: vmStat.Total, - Free: vmStat.Free, - Used: vmStat.Used, - Percent: vmStat.UsedPercent, - } -} diff --git a/app/v1/station/module.go b/app/v1/station/module.go deleted file mode 100644 index 7661ccb50..000000000 --- a/app/v1/station/module.go +++ /dev/null @@ -1,18 +0,0 @@ -package station - -import ( - "github.com/anyshake/observer/app" - "github.com/anyshake/observer/server/response" - "github.com/gin-gonic/gin" -) - -// @Summary AnyShake Observer station status -// @Description Get Observer station status including system information, memory usage, disk usage, CPU usage, ADC information, geophone information, and location information -// @Router /station [get] -// @Produce application/json -// @Success 200 {object} response.HttpResponse{data=System} "Successfully read station status" -func (s *Station) RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) { - rg.GET("/station", func(c 
*gin.Context) { - response.Message(c, "Successfully read station information", getSystem(options.FeatureOptions)) - }) -} diff --git a/app/v1/station/os.go b/app/v1/station/os.go deleted file mode 100644 index d833ffab0..000000000 --- a/app/v1/station/os.go +++ /dev/null @@ -1,20 +0,0 @@ -package station - -import ( - "os" - "runtime" - - "github.com/wille/osutil" -) - -func getOS() osModel { - hostname, _ := os.Hostname() - osInfo := osModel{ - OS: runtime.GOOS, - Arch: runtime.GOARCH, - Distro: osutil.Name, - Hostname: hostname, - } - - return osInfo -} diff --git a/app/v1/station/position.go b/app/v1/station/position.go deleted file mode 100644 index 2075e02bc..000000000 --- a/app/v1/station/position.go +++ /dev/null @@ -1,11 +0,0 @@ -package station - -import "github.com/anyshake/observer/config" - -func getLocation(conf *config.Conf) positionModel { - return positionModel{ - Latitude: conf.Station.Latitude, - Longitude: conf.Station.Longitude, - Elevation: conf.Station.Elevation, - } -} diff --git a/app/v1/station/station.go b/app/v1/station/station.go deleted file mode 100644 index ea9d7c9a9..000000000 --- a/app/v1/station/station.go +++ /dev/null @@ -1,13 +0,0 @@ -package station - -import "github.com/anyshake/observer/config" - -func getStation(conf *config.Conf) stationModel { - return stationModel{ - UUID: conf.Station.UUID, - Name: conf.Station.Name, - Station: conf.Station.Station, - Network: conf.Station.Network, - Location: conf.Station.Location, - } -} diff --git a/app/v1/station/system.go b/app/v1/station/system.go deleted file mode 100644 index 0c64547bd..000000000 --- a/app/v1/station/system.go +++ /dev/null @@ -1,23 +0,0 @@ -package station - -import ( - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/duration" -) - -func getSystem(options *feature.FeatureOptions) System { - _, ts := duration.Timestamp(options.Status.System.Offset) - return System{ - Status: options.Status.System, - Location: 
getLocation(options.Config), - Geophone: getGeophone(options.Config), - Station: getStation(options.Config), - ADC: getADC(options.Config), - Memory: getMemory(), - Uptime: getUptime(), - Disk: getDisk(), - CPU: getCPU(), - OS: getOS(), - Timestamp: ts, - } -} diff --git a/app/v1/station/types.go b/app/v1/station/types.go deleted file mode 100644 index c989ca962..000000000 --- a/app/v1/station/types.go +++ /dev/null @@ -1,69 +0,0 @@ -package station - -import "github.com/anyshake/observer/publisher" - -type Station struct{} - -type adcModel struct { - Resolution int `json:"resolution"` - FullScale float64 `json:"fullscale"` -} - -type geophoneModel struct { - Sensitivity float64 `json:"sensitivity"` - Frequency float64 `json:"frequency"` -} - -type memoryModel struct { - Total uint64 `json:"total"` - Free uint64 `json:"free"` - Used uint64 `json:"used"` - Percent float64 `json:"percent"` -} - -type diskModel struct { - Total uint64 `json:"total"` - Free uint64 `json:"free"` - Used uint64 `json:"used"` - Percent float64 `json:"percent"` -} - -type osModel struct { - OS string `json:"os"` - Arch string `json:"arch"` - Distro string `json:"distro"` - Hostname string `json:"hostname"` -} - -type cpuModel struct { - Model string `json:"model"` - Percent float64 `json:"percent"` -} - -type positionModel struct { - Latitude float64 `json:"latitude"` - Longitude float64 `json:"longitude"` - Elevation float64 `json:"elevation"` -} - -type stationModel struct { - UUID string `json:"uuid"` - Name string `json:"name"` - Station string `json:"station"` - Network string `json:"network"` - Location string `json:"location"` -} - -type System struct { - Timestamp int64 `json:"timestamp"` - Uptime int64 `json:"uptime"` - Station stationModel `json:"station"` - Memory memoryModel `json:"memory"` - Disk diskModel `json:"disk"` - ADC adcModel `json:"adc"` - OS osModel `json:"os"` - CPU cpuModel `json:"cpu"` - Geophone geophoneModel `json:"geophone"` - Location positionModel 
`json:"position"` - Status *publisher.System `json:"status"` -} diff --git a/app/v1/station/uptime.go b/app/v1/station/uptime.go deleted file mode 100644 index 15a3e1555..000000000 --- a/app/v1/station/uptime.go +++ /dev/null @@ -1,14 +0,0 @@ -package station - -import ( - "github.com/mackerelio/go-osstat/uptime" -) - -func getUptime() int64 { - up, err := uptime.Get() - if err != nil { - return -1 - } - - return int64(up.Seconds()) -} diff --git a/app/v1/trace/distance.go b/app/v1/trace/distance.go deleted file mode 100644 index 86d74c576..000000000 --- a/app/v1/trace/distance.go +++ /dev/null @@ -1,22 +0,0 @@ -package trace - -import "math" - -func getDistance(lat1, lat2, lng1, lng2 float64) float64 { - var ( - radius = 6378.137 - rad = math.Pi / 180.0 - ) - lat1 = lat1 * rad - lng1 = lng1 * rad - lat2 = lat2 * rad - lng2 = lng2 * rad - - var ( - a = lat1 - lat2 - b = lng1 - lng2 - cal = 2 * math.Asin(math.Sqrt(math.Pow(math.Sin(a/2), 2)+math.Cos(lat1)*math.Cos(lat2)*math.Pow(math.Sin(b/2), 2))) * radius - result = math.Round(cal*10000) / 10000 - ) - return result -} diff --git a/app/v1/trace/empty.go b/app/v1/trace/empty.go deleted file mode 100644 index b80fd6447..000000000 --- a/app/v1/trace/empty.go +++ /dev/null @@ -1,16 +0,0 @@ -package trace - -func isEmpty(m map[string]any, k []string) bool { - for _, v := range k { - switch m[v].(type) { - case string: - if len(m[v].(string)) == 0 { - return false - } - default: - continue - } - } - - return true -} diff --git a/app/v1/trace/float.go b/app/v1/trace/float.go deleted file mode 100644 index 158467b48..000000000 --- a/app/v1/trace/float.go +++ /dev/null @@ -1,12 +0,0 @@ -package trace - -import "strconv" - -func string2Float(num string) float64 { - r, err := strconv.ParseFloat(num, 64) - if err != nil { - return 0.0 - } - - return r -} diff --git a/app/v1/trace/key.go b/app/v1/trace/key.go deleted file mode 100644 index 7f3975822..000000000 --- a/app/v1/trace/key.go +++ /dev/null @@ -1,11 +0,0 @@ -package 
trace - -func hasKey(m map[string]any, k []string) bool { - for _, v := range k { - if _, ok := m[v]; !ok { - return false - } - } - - return true -} diff --git a/app/v1/trace/sort.go b/app/v1/trace/sort.go deleted file mode 100644 index 57c5cd6f9..000000000 --- a/app/v1/trace/sort.go +++ /dev/null @@ -1,9 +0,0 @@ -package trace - -import "sort" - -func sortByTimestamp(events []Event) { - sort.Slice(events, func(i, j int) bool { - return events[i].Timestamp > events[j].Timestamp - }) -} diff --git a/app/v1/trace/types.go b/app/v1/trace/types.go deleted file mode 100644 index ce370c4ba..000000000 --- a/app/v1/trace/types.go +++ /dev/null @@ -1,42 +0,0 @@ -package trace - -import "time" - -const EXPIRATION = time.Minute // Cache expiration duration for calling external API response - -type Trace struct{} - -type Binding struct { - Source string `form:"source" json:"source" xml:"source" binding:"required"` -} - -type estimation struct { - P float64 `json:"p"` - S float64 `json:"s"` -} - -type Event struct { - Verfied bool `json:"verfied"` - Timestamp int64 `json:"timestamp"` - Event string `json:"event"` - Region string `json:"region"` - Depth float64 `json:"depth"` - Latitude float64 `json:"latitude"` - Longitude float64 `json:"longitude"` - Distance float64 `json:"distance"` - Magnitude float64 `json:"magnitude"` - Estimation estimation `json:"estimation"` -} - -type DataSource interface { - Property() string - Fetch() ([]byte, error) - Parse([]byte) (map[string]any, error) - List(latitude, longitude float64) ([]Event, error) - Format(float64, float64, map[string]any) ([]Event, error) -} - -type DataSourceCache struct { - Time time.Time - Cache []byte -} diff --git a/build/assets/config.json b/build/assets/config.json index 7a5f9bc1a..8bbf3268c 100644 --- a/build/assets/config.json +++ b/build/assets/config.json @@ -1,63 +1,67 @@ { "station_settings": { - "uuid": "a373e39c-8e15-44ae-a1ad-6fb622bc49e6", "name": "AnyShake Station", - "station": "SHAKE", - "network": 
"AS", - "location": "00", - "latitude": 39.9, - "longitude": 116.3, - "elevation": 0, + "owner": "Lee", "region": "Asia", "country": "China", - "city": "Beijing", - "owner": "Lee" + "city": "Beijing" }, - "geophone_settings": { - "sensitivity": 28.8, - "frequency": 4.5 + "location_settings": { + "latitude": 39.9, + "longitude": 116.4, + "elevation": 50.0 }, - "adc_settings": { - "resolution": 27, - "fullscale": 5.0 + "explorer_settings": { + "dsn": "transport:///dev/ttyUSB0?baudrate=115200", + "engine": "serial", + "legacy": false }, - "serial_settings": { - "packet": 4, - "baud": 19200, - "device": "/dev/ttyUSB0" + "sensor_settings": { + "frequency": 4.5, + "sensitivity": 28.8, + "velocity": true, + "vref": 5.0, + "fullscale": 10.0, + "resolution": 32 + }, + "stream_settings": { + "network": "AS", + "station": "SHAKE", + "location": "00", + "channel": "EH" }, "ntpclient_settings": { - "host": "0.pool.ntp.org", - "port": 123, - "timeout": 3, - "interval": 5 - }, - "archiver_settings": { - "enable": false, - "engine": "postgresql", - "host": "127.0.0.1", - "port": 5432, - "username": "postgres", - "password": "passw0rd", - "database": "observer", - "lifecycle": 10 + "host": "pool.ntp.org", + "port": 123 + }, + "database_settings": { + "engine": "sqlite", + "host": "", + "port": 0, + "username": "", + "password": "", + "database": "/home/yuki/observer.db" }, "server_settings": { "host": "0.0.0.0", "port": 8073, "cors": true, - "debug": false, + "debug": true, "rate": 30 }, - "miniseed_settings": { - "enable": false, - "path": "/data/miniseed", - "lifecycle": 10 + "logger_settings": { + "level": "info", + "dump": "/home/yuki/observer.log" }, - "seedlink_settings": { - "enable": false, - "host": "0.0.0.0", - "port": 18000, - "duration": 86400 + "services_settings": { + "miniseed": { + "enable": true, + "lifecycle": 10, + "path": "/home/yuki/miniseed" + }, + "archiver": { + "enable": true, + "lifecycle": 10 + } } -} +} \ No newline at end of file diff --git 
a/cleaners/database/execute.go b/cleaners/database/execute.go new file mode 100644 index 000000000..7201a9c0d --- /dev/null +++ b/cleaners/database/execute.go @@ -0,0 +1,12 @@ +package database + +import ( + "github.com/anyshake/observer/cleaners" + "github.com/anyshake/observer/drivers/dao" + "github.com/anyshake/observer/utils/logger" +) + +func (d *DatabaseCleanerTask) Execute(options *cleaners.Options) { + logger.GetLogger(d.GetTaskName()).Info("closing connection to database") + dao.Close(options.Database) +} diff --git a/cleaners/database/name.go b/cleaners/database/name.go new file mode 100644 index 000000000..48adfc1bd --- /dev/null +++ b/cleaners/database/name.go @@ -0,0 +1,5 @@ +package database + +func (t *DatabaseCleanerTask) GetTaskName() string { + return "database" +} diff --git a/cleaners/database/types.go b/cleaners/database/types.go new file mode 100644 index 000000000..262983941 --- /dev/null +++ b/cleaners/database/types.go @@ -0,0 +1,3 @@ +package database + +type DatabaseCleanerTask struct{} diff --git a/cleaners/explorer/execute.go b/cleaners/explorer/execute.go new file mode 100644 index 000000000..6d35dd685 --- /dev/null +++ b/cleaners/explorer/execute.go @@ -0,0 +1,14 @@ +package explorer + +import ( + "github.com/anyshake/observer/cleaners" + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/utils/logger" +) + +func (p *ExplorerCleanerTask) Execute(options *cleaners.Options) { + options.Dependency.Invoke(func(explorerDeps *explorer.ExplorerDependency) { + logger.GetLogger(p.GetTaskName()).Info("closing connection to hardware device") + explorerDeps.Transport.Close() + }) +} diff --git a/cleaners/explorer/name.go b/cleaners/explorer/name.go new file mode 100644 index 000000000..06bca43a2 --- /dev/null +++ b/cleaners/explorer/name.go @@ -0,0 +1,5 @@ +package explorer + +func (t *ExplorerCleanerTask) GetTaskName() string { + return "explorer" +} diff --git a/cleaners/explorer/types.go 
b/cleaners/explorer/types.go new file mode 100644 index 000000000..cc3f4124c --- /dev/null +++ b/cleaners/explorer/types.go @@ -0,0 +1,3 @@ +package explorer + +type ExplorerCleanerTask struct{} diff --git a/cleaners/types.go b/cleaners/types.go new file mode 100644 index 000000000..2561bc738 --- /dev/null +++ b/cleaners/types.go @@ -0,0 +1,20 @@ +package cleaners + +import ( + "github.com/anyshake/observer/config" + "github.com/anyshake/observer/utils/timesource" + "go.uber.org/dig" + "gorm.io/gorm" +) + +type Options struct { + Config *config.Config + Database *gorm.DB + Dependency *dig.Container + TimeSource timesource.Source +} + +type CleanerTask interface { + Execute(*Options) + GetTaskName() string +} diff --git a/utils/text/contact.go b/cmd/concat.go similarity index 79% rename from utils/text/contact.go rename to cmd/concat.go index 421632b2c..c8c9ea7f3 100644 --- a/utils/text/contact.go +++ b/cmd/concat.go @@ -1,11 +1,11 @@ -package text +package main import ( "fmt" "strings" ) -func Concat(v ...any) string { +func concat(v ...any) string { builder := strings.Builder{} for _, value := range v { builder.WriteString(fmt.Sprintf("%+v", value)) diff --git a/cmd/main.go b/cmd/main.go index a70a1ea6c..224d3c7e9 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -1,103 +1,219 @@ package main import ( + "context" + "flag" "fmt" "os" "os/signal" "sync" "syscall" + "time" - "github.com/fatih/color" + "go.uber.org/dig" - "github.com/anyshake/observer/app" + "github.com/anyshake/observer/cleaners" + cleaner_database "github.com/anyshake/observer/cleaners/database" + cleaner_explorer "github.com/anyshake/observer/cleaners/explorer" "github.com/anyshake/observer/config" - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/feature/archiver" - "github.com/anyshake/observer/feature/geophone" - "github.com/anyshake/observer/feature/miniseed" - "github.com/anyshake/observer/feature/ntpclient" - "github.com/anyshake/observer/feature/seedlink" - 
"github.com/anyshake/observer/publisher" + "github.com/anyshake/observer/drivers/dao" "github.com/anyshake/observer/server" + "github.com/anyshake/observer/services" + service_archiver "github.com/anyshake/observer/services/archiver" + + service_miniseed "github.com/anyshake/observer/services/miniseed" + service_watchdog "github.com/anyshake/observer/services/watchdog" + "github.com/anyshake/observer/startups" + startup_explorer "github.com/anyshake/observer/startups/explorer" "github.com/anyshake/observer/utils/logger" + "github.com/anyshake/observer/utils/timesource" + "github.com/beevik/ntp" "github.com/common-nighthawk/go-figure" ) -func parseCommandLine(conf *config.Conf) error { - var args config.Args - args.Read() +func parseCommandLine() (args arguments) { + flag.StringVar(&args.Path, "config", "./config.json", "Path to config file") + flag.BoolVar(&args.Version, "version", false, "Print version information") + flag.Parse() + if args.Version { printVersion() os.Exit(0) } - err := conf.Read(args.Path) + return args +} + +func setupLogger(level, dumpPath string) { + var err error + switch level { + case "info": + err = logger.SetLevel(logger.INFO) + case "warn": + err = logger.SetLevel(logger.WARN) + case "error": + err = logger.SetLevel(logger.ERROR) + default: + err = logger.SetLevel(logger.INFO) + } + if err != nil { - return err + logger.GetLogger(main).Fatalln(err) } - return nil + if len(dumpPath) != 0 { + logger.SetFile(dumpPath) + } } func init() { - w := color.New(color.FgHiCyan).SprintFunc() t := figure.NewFigure("Observer", "standard", true).String() - fmt.Println(w(t)) + fmt.Println(t) + logger.Initialize() } // @BasePath /api/v1 -// @title Observer RESTful API documentation -// @description This is Observer RESTful API documentation, please set `server_settings.debug` to `false` in `config.json` when deploying to production environment in case of any security issues. 
+// @title AnyShake Observer APIv1 +// @description This is APIv1 documentation for AnyShake Observer, please set `server_settings.debug` to `false` in `config.json` when deploying to production environment in case of any security issues. func main() { - // Read configuration - var conf config.Conf - err := parseCommandLine(&conf) + args := parseCommandLine() + + var conf config.Config + err := conf.Read(args.Path) + if err != nil { + logger.GetLogger(main).Fatalln(err) + } + err = conf.Validate() + if err != nil { + logger.GetLogger(main).Fatalln(err) + } + + // Setup logger with given configuration + setupLogger(conf.Logger.Level, conf.Logger.Dump) + logger.GetLogger(main).Info("global configuration has been loaded") + + // Create time source with NTP server + logger.GetLogger(main).Infof("querying NTP server at %s:%d", conf.NtpClient.Host, conf.NtpClient.Port) + res, err := ntp.QueryWithOptions(conf.NtpClient.Host, ntp.QueryOptions{ + Port: conf.NtpClient.Port, Timeout: 10 * time.Second, + }) + if err != nil { + logger.GetLogger(main).Fatalln(err) + } + timeSource := timesource.New(time.Now(), res.Time) + logger.GetLogger(main).Info("time source has been created") + + // Connect to database + databaseConn, err := dao.Open( + conf.Database.Host, + conf.Database.Port, + conf.Database.Engine, + conf.Database.Username, + conf.Database.Password, + conf.Database.Database, + ) if err != nil { - logger.Fatal("main", err, color.FgRed) - } else { - logger.Print("main", "main daemon has initialized", color.FgMagenta, false) + logger.GetLogger(main).Fatalln(err) } + logger.GetLogger(main).Info("database connection has been established") + err = migrate(databaseConn) + if err != nil { + logger.GetLogger(main).Fatalln(err) + } + logger.GetLogger(main).Info("database schema has been migrated") + + // Create dependency injection container + depsContainer := dig.New() + + // Setup context for graceful shutdown + cancelToken, abortSignal := 
context.WithCancel(context.Background()) - // Initialize global status - var status publisher.Status - publisher.Initialize(&conf, &status) + // Setup cleaner tasks for graceful shutdown + cleanerTasks := []cleaners.CleanerTask{ + &cleaner_explorer.ExplorerCleanerTask{}, + &cleaner_database.DatabaseCleanerTask{}, + } + cleanerOptions := &cleaners.Options{ + Config: &conf, + Database: databaseConn, + Dependency: depsContainer, + TimeSource: timeSource, + } + runCleanerTasks := func() { + for _, t := range cleanerTasks { + taskName := t.GetTaskName() + logger.GetLogger(taskName).Infof("running cleaner task for %s", taskName) + t.Execute(cleanerOptions) + } + } + defer runCleanerTasks() + + // Setup startup tasks and provide dependencies + startupTasks := []startups.StartupTask{ + &startup_explorer.ExplorerStartupTask{CancelToken: cancelToken}, + } + startupOptions := &startups.Options{ + Config: &conf, + Database: databaseConn, + TimeSource: timeSource, + } + for _, t := range startupTasks { + taskName := t.GetTaskName() + err := t.Provide(depsContainer, startupOptions) + if err != nil { + logger.GetLogger(taskName).Errorln(err) + runCleanerTasks() + os.Exit(1) + } + err = t.Execute(depsContainer, startupOptions) + if err != nil { + logger.GetLogger(taskName).Errorln(err) + runCleanerTasks() + os.Exit(1) + } + } - // Register features - features := []feature.Feature{ - &ntpclient.NTPClient{}, - &geophone.Geophone{}, - &archiver.Archiver{}, - &miniseed.MiniSEED{}, - &seedlink.SeedLink{}, + // Setup background services + regServices := []services.Service{ + &service_watchdog.WatchdogService{}, + &service_archiver.ArchiverService{}, + &service_miniseed.MiniSeedService{}, } - featureOptions := &feature.FeatureOptions{ - Config: &conf, - Status: &status, + serviceOptions := &services.Options{ + Config: &conf, + Database: databaseConn, + Dependency: depsContainer, + TimeSource: timeSource, + CancelToken: cancelToken, } - featureWaitGroup := new(sync.WaitGroup) - for _, s 
:= range features { - go s.Run(featureOptions, featureWaitGroup) + var waitGroup sync.WaitGroup + for _, s := range regServices { + waitGroup.Add(1) + go s.Start(serviceOptions, &waitGroup) } // Start HTTP server - go server.StartDaemon( + go server.Serve( conf.Server.Host, conf.Server.Port, - &app.ServerOptions{ - Gzip: 9, - WebPrefix: WEB_PREFIX, - APIPrefix: API_PREFIX, - FeatureOptions: featureOptions, - CORS: conf.Server.CORS, - RateFactor: conf.Server.Rate, + &server.Options{ + CORS: conf.Server.CORS, + DebugMode: conf.Server.Debug, + GzipLevel: GZIP_LEVEL, + RateFactor: conf.Server.Rate, + WebPrefix: WEB_PREFIX, + ApiPrefix: API_PREFIX, + ServicesOptions: serviceOptions, }) + logger.GetLogger(main).Infof("web server is listening on %s:%d", conf.Server.Host, conf.Server.Port) // Receive interrupt signals - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM) - <-sigCh + osSignal := make(chan os.Signal, 1) + signal.Notify(osSignal, os.Interrupt, syscall.SIGTERM) + <-osSignal - // Wait for all features to stop - logger.Print("main", "main daemon is shutting down", color.FgMagenta, true) - featureWaitGroup.Wait() + // Stop services gracefully + logger.GetLogger(main).Info("services are shutting down, please wait") + abortSignal() + waitGroup.Wait() } diff --git a/cmd/migrate.go b/cmd/migrate.go new file mode 100644 index 000000000..af17946fc --- /dev/null +++ b/cmd/migrate.go @@ -0,0 +1,16 @@ +package main + +import ( + "github.com/anyshake/observer/drivers/dao" + "github.com/anyshake/observer/drivers/dao/tables" + "gorm.io/gorm" +) + +func migrate(databaseConn *gorm.DB) error { + err := dao.Migrate(databaseConn, tables.AdcCount{}) + if err != nil { + return err + } + + return nil +} diff --git a/cmd/types.go b/cmd/types.go index 1c7008c29..a6a5bc3ff 100644 --- a/cmd/types.go +++ b/cmd/types.go @@ -1,6 +1,12 @@ package main const ( - API_PREFIX string = "/api" - WEB_PREFIX string = "/" + GZIP_LEVEL = 9 + WEB_PREFIX = "/" + 
API_PREFIX = "/api" ) + +type arguments struct { + Path string // Path to config file + Version bool // Show version information +} diff --git a/cmd/version.go b/cmd/version.go index 35f80ea8d..2b74fda9e 100644 --- a/cmd/version.go +++ b/cmd/version.go @@ -4,9 +4,6 @@ import ( "fmt" "runtime" "time" - - "github.com/anyshake/observer/utils/text" - "github.com/fatih/color" ) var ( @@ -16,14 +13,11 @@ var ( ) func printVersion() { - var ( - copyright = "© AnyShake " + fmt.Sprintf("%d", time.Now().Year()) + ". All Rights Reversed." - version = text.Concat( - "Observer ", version, " (", description, ")\nRelease: ", version, "-", release, " ", - runtime.Version(), " ", runtime.GOOS, "/", runtime.GOARCH, "\n", copyright, - ) + copyright := "© AnyShake " + fmt.Sprintf("%d", time.Now().Year()) + ". All Rights Reversed." + version = concat( + "Observer ", version, " (", description, ")\nRelease: ", version, "-", release, " ", + runtime.Version(), " ", runtime.GOOS, "/", runtime.GOARCH, "\n", copyright, ) - w := color.New(color.FgHiCyan).SprintFunc() - fmt.Println(w(version)) + fmt.Println(version) } diff --git a/config/args.go b/config/args.go deleted file mode 100644 index f3b7afc6f..000000000 --- a/config/args.go +++ /dev/null @@ -1,9 +0,0 @@ -package config - -import "flag" - -func (a *Args) Read() { - flag.StringVar(&a.Path, "config", "./config.json", "Path to config file") - flag.BoolVar(&a.Version, "version", false, "Print version information") - flag.Parse() -} diff --git a/config/config.go b/config/config.go index be5f6d41a..f3adc3f40 100644 --- a/config/config.go +++ b/config/config.go @@ -5,7 +5,7 @@ import ( "os" ) -func (c *Conf) Read(path string) error { +func (c *Config) Read(path string) error { file, err := os.Open(path) if err != nil { return err diff --git a/config/types.go b/config/types.go index 6863f4fca..096d4b978 100644 --- a/config/types.go +++ b/config/types.go @@ -1,46 +1,48 @@ package config -type station struct { - UUID string `json:"uuid"` - Name 
string `json:"name"` - Station string `json:"station"` - Network string `json:"network"` - Location string `json:"location"` - Country string `json:"country"` - Region string `json:"region"` - City string `json:"city"` - Owner string `json:"owner"` +type Station struct { + Name string `json:"name"` + Owner string `json:"owner"` + Region string `json:"region"` + Country string `json:"country"` + City string `json:"city"` +} + +type location struct { Latitude float64 `json:"latitude"` Longitude float64 `json:"longitude"` Elevation float64 `json:"elevation"` } -type serial struct { - Device string `json:"device"` - Baud int `json:"baud"` - Packet int `json:"packet"` -} - -type adc struct { - FullScale float64 `json:"fullscale"` - Resolution int `json:"resolution"` +type explorer struct { + Legacy bool `json:"legacy"` + DSN string `json:"dsn"` + Engine string `json:"engine"` } -type geophone struct { +type Sensor struct { Frequency float64 `json:"frequency"` Sensitivity float64 `json:"sensitivity"` + Velocity bool `json:"velocity"` + Vref float64 `json:"vref"` + FullScale float64 `json:"fullscale"` + Resolution int `json:"resolution"` +} + +type Stream struct { + Station string `json:"station"` + Network string `json:"network"` + Location string `json:"location"` + Channel string `json:"channel"` } type ntpclient struct { - Host string `json:"host"` - Port int `json:"port"` - Timeout int `json:"timeout"` - Interval int `json:"interval"` + Host string `json:"host"` + Port int `json:"port"` } -type archiver struct { +type database struct { Engine string `json:"engine"` - Enable bool `json:"enable"` Host string `json:"host"` Port int `json:"port"` LifeCycle int `json:"lifecycle"` @@ -57,32 +59,20 @@ type server struct { Rate int `json:"rate"` } -type miniseed struct { - Enable bool `json:"enable"` - Path string `json:"path"` - LifeCycle int `json:"lifecycle"` -} - -type seedlink struct { - Enable bool `json:"enable"` - Host string `json:"host"` - Port int `json:"port"` - 
Duration int `json:"duration"` -} - -type Conf struct { - Station station `json:"station_settings"` - Serial serial `json:"serial_settings"` - ADC adc `json:"adc_settings"` - Geophone geophone `json:"geophone_settings"` - NTPClient ntpclient `json:"ntpclient_settings"` - Archiver archiver `json:"archiver_settings"` - Server server `json:"server_settings"` - MiniSEED miniseed `json:"miniseed_settings"` - SeedLink seedlink `json:"seedlink_settings"` +type logger struct { + Level string `json:"level"` + Dump string `json:"dump"` } -type Args struct { - Path string // Path to config file - Version bool // Show version information +type Config struct { + Station Station `json:"station_settings"` + Location location `json:"location_settings"` + Explorer explorer `json:"explorer_settings"` + Sensor Sensor `json:"sensor_settings"` + Stream Stream `json:"stream_settings"` + NtpClient ntpclient `json:"ntpclient_settings"` + Database database `json:"database_settings"` + Server server `json:"server_settings"` + Logger logger `json:"logger_settings"` + Services map[string]any `json:"services_settings"` } diff --git a/config/validate.go b/config/validate.go new file mode 100644 index 000000000..51145dc33 --- /dev/null +++ b/config/validate.go @@ -0,0 +1,14 @@ +package config + +import "github.com/go-playground/validator/v10" + +func (c *Config) Validate() error { + validate := validator.New(validator.WithRequiredStructEnabled()) + err := validate.Struct(c) + + if err != nil { + return err + } + + return nil +} diff --git a/docs/Makefile b/docs/Makefile index 12bd9e649..2e5464d0c 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,7 +1,9 @@ -.PHONY: build +.PHONY: gen -build: +gen: ifeq ($(shell command -v swag 2> /dev/null),) - $(error "Swagger is not installed. Run go install github.com/swaggo/swag/cmd/swag@latest first") + @echo "Installing Swagger..." 
+ @go get github.com/swaggo/swag/cmd/swag + @go install github.com/swaggo/swag/cmd/swag endif - @swag init -g ../cmd/main.go -d ../app,../server,../publisher -o ./ + @swag init -g ../cmd/main.go -d ../api,../config,../drivers/explorer,../server -o ./ diff --git a/docs/docs.go b/docs/docs.go index 55ce41b13..b489c72a6 100644 --- a/docs/docs.go +++ b/docs/docs.go @@ -29,15 +29,15 @@ const docTemplate = `{ "parameters": [ { "type": "integer", - "description": "Start timestamp of the waveform data to be queried, in milliseconds", - "name": "start", + "description": "Start timestamp of the waveform data to be queried, in milliseconds (unix timestamp)", + "name": "start_time", "in": "formData", "required": true }, { "type": "integer", - "description": "End timestamp of the waveform data to be queried, in milliseconds", - "name": "end", + "description": "End timestamp of the waveform data to be queried, in milliseconds (unix timestamp)", + "name": "end_time", "in": "formData", "required": true }, @@ -50,7 +50,7 @@ const docTemplate = `{ }, { "type": "string", - "description": "Channel of the waveform, ` + "`" + `EHZ` + "`" + `, ` + "`" + `EHE` + "`" + ` or ` + "`" + `EHN` + "`" + `, reuqired when format is ` + "`" + `sac` + "`" + `", + "description": "Channel of the waveform, ` + "`" + `Z` + "`" + `, ` + "`" + `E` + "`" + ` or ` + "`" + `N` + "`" + `, reuqired when format is ` + "`" + `sac` + "`" + `", "name": "channel", "in": "formData" } @@ -69,7 +69,7 @@ const docTemplate = `{ "data": { "type": "array", "items": { - "$ref": "#/definitions/publisher.Geophone" + "$ref": "#/definitions/explorer.ExplorerData" } } } @@ -176,7 +176,7 @@ const docTemplate = `{ "data": { "type": "array", "items": { - "$ref": "#/definitions/mseed.MiniSEEDFile" + "$ref": "#/definitions/mseed.miniSeedFileInfo" } } } @@ -214,7 +214,7 @@ const docTemplate = `{ "summary": "AnyShake Observer station status", "responses": { "200": { - "description": "Successfully read station status", + "description": 
"Successfully read station information", "schema": { "allOf": [ { @@ -224,7 +224,7 @@ const docTemplate = `{ "type": "object", "properties": { "data": { - "$ref": "#/definitions/station.System" + "$ref": "#/definitions/station.stationInfo" } } } @@ -267,7 +267,7 @@ const docTemplate = `{ "data": { "type": "array", "items": { - "$ref": "#/definitions/trace.Event" + "$ref": "#/definitions/trace.seismicEvent" } } } @@ -292,60 +292,109 @@ const docTemplate = `{ } }, "definitions": { - "mseed.MiniSEEDFile": { + "config.Sensor": { "type": "object", "properties": { + "frequency": { + "type": "number" + }, + "fullscale": { + "type": "number" + }, + "resolution": { + "type": "integer" + }, + "sensitivity": { + "type": "number" + }, + "velocity": { + "type": "boolean" + }, + "vref": { + "type": "number" + } + } + }, + "config.Station": { + "type": "object", + "properties": { + "city": { + "type": "string" + }, + "country": { + "type": "string" + }, "name": { "type": "string" }, - "size": { + "owner": { "type": "string" }, - "time": { - "type": "integer" + "region": { + "type": "string" + } + } + }, + "config.Stream": { + "type": "object", + "properties": { + "channel": { + "type": "string" }, - "ttl": { - "type": "integer" + "location": { + "type": "string" + }, + "network": { + "type": "string" + }, + "station": { + "type": "string" } } }, - "publisher.Geophone": { + "explorer.ExplorerData": { "type": "object", "properties": { - "ehe": { + "e_axis": { "type": "array", "items": { "type": "integer" } }, - "ehn": { + "n_axis": { "type": "array", "items": { "type": "integer" } }, - "ehz": { + "sample_rate": { + "type": "integer" + }, + "timestamp": { + "type": "integer" + }, + "z_axis": { "type": "array", "items": { "type": "integer" } - }, - "ts": { - "type": "integer" } } }, - "publisher.System": { + "mseed.miniSeedFileInfo": { "type": "object", "properties": { - "errors": { - "type": "integer" + "name": { + "type": "string" }, - "messages": { + "size": { + "type": "string" + 
}, + "time": { "type": "integer" }, - "offset": { - "type": "number" + "ttl": { + "type": "integer" } } }, @@ -370,56 +419,7 @@ const docTemplate = `{ } } }, - "station.System": { - "type": "object", - "properties": { - "adc": { - "$ref": "#/definitions/station.adcModel" - }, - "cpu": { - "$ref": "#/definitions/station.cpuModel" - }, - "disk": { - "$ref": "#/definitions/station.diskModel" - }, - "geophone": { - "$ref": "#/definitions/station.geophoneModel" - }, - "memory": { - "$ref": "#/definitions/station.memoryModel" - }, - "os": { - "$ref": "#/definitions/station.osModel" - }, - "position": { - "$ref": "#/definitions/station.positionModel" - }, - "station": { - "$ref": "#/definitions/station.stationModel" - }, - "status": { - "$ref": "#/definitions/publisher.System" - }, - "timestamp": { - "type": "integer" - }, - "uptime": { - "type": "integer" - } - } - }, - "station.adcModel": { - "type": "object", - "properties": { - "fullscale": { - "type": "number" - }, - "resolution": { - "type": "integer" - } - } - }, - "station.cpuModel": { + "station.cpuInfo": { "type": "object", "properties": { "model": { @@ -430,7 +430,7 @@ const docTemplate = `{ } } }, - "station.diskModel": { + "station.diskInfo": { "type": "object", "properties": { "free": { @@ -447,18 +447,36 @@ const docTemplate = `{ } } }, - "station.geophoneModel": { + "station.explorerInfo": { "type": "object", "properties": { - "frequency": { + "device_id": { + "type": "integer" + }, + "elapsed": { + "type": "integer" + }, + "elevation": { "type": "number" }, - "sensitivity": { + "errors": { + "type": "integer" + }, + "latitude": { "type": "number" + }, + "longitude": { + "type": "number" + }, + "received": { + "type": "integer" + }, + "sample_rate": { + "type": "integer" } } }, - "station.memoryModel": { + "station.memoryInfo": { "type": "object", "properties": { "free": { @@ -475,7 +493,7 @@ const docTemplate = `{ } } }, - "station.osModel": { + "station.osInfo": { "type": "object", "properties": { 
"arch": { @@ -489,44 +507,45 @@ const docTemplate = `{ }, "os": { "type": "string" - } - } - }, - "station.positionModel": { - "type": "object", - "properties": { - "elevation": { - "type": "number" }, - "latitude": { - "type": "number" + "timestamp": { + "type": "integer" }, - "longitude": { - "type": "number" + "uptime": { + "type": "integer" } } }, - "station.stationModel": { + "station.stationInfo": { "type": "object", "properties": { - "location": { - "type": "string" + "cpu": { + "$ref": "#/definitions/station.cpuInfo" }, - "name": { - "type": "string" + "disk": { + "$ref": "#/definitions/station.diskInfo" }, - "network": { - "type": "string" + "explorer": { + "$ref": "#/definitions/station.explorerInfo" + }, + "memory": { + "$ref": "#/definitions/station.memoryInfo" + }, + "os": { + "$ref": "#/definitions/station.osInfo" + }, + "sensor": { + "$ref": "#/definitions/config.Sensor" }, "station": { - "type": "string" + "$ref": "#/definitions/config.Station" }, - "uuid": { - "type": "string" + "stream": { + "$ref": "#/definitions/config.Stream" } } }, - "trace.Event": { + "trace.seismicEvent": { "type": "object", "properties": { "depth": { @@ -536,7 +555,7 @@ const docTemplate = `{ "type": "number" }, "estimation": { - "$ref": "#/definitions/trace.estimation" + "$ref": "#/definitions/trace.seismicEventEstimation" }, "event": { "type": "string" @@ -561,7 +580,7 @@ const docTemplate = `{ } } }, - "trace.estimation": { + "trace.seismicEventEstimation": { "type": "object", "properties": { "p": { @@ -581,8 +600,8 @@ var SwaggerInfo = &swag.Spec{ Host: "", BasePath: "/api/v1", Schemes: []string{}, - Title: "Observer RESTful API documentation", - Description: "This is Observer RESTful API documentation, please set `server_settings.debug` to `false` in `config.json` when deploying to production environment in case of any security issues.", + Title: "AnyShake Observer APIv1", + Description: "This is APIv1 documentation for AnyShake Observer, please set 
`server_settings.debug` to `false` in `config.json` when deploying to production environment in case of any security issues.", InfoInstanceName: "swagger", SwaggerTemplate: docTemplate, LeftDelim: "{{", diff --git a/docs/swagger.json b/docs/swagger.json index 9ec3e7806..47e29ecac 100644 --- a/docs/swagger.json +++ b/docs/swagger.json @@ -1,8 +1,8 @@ { "swagger": "2.0", "info": { - "description": "This is Observer RESTful API documentation, please set `server_settings.debug` to `false` in `config.json` when deploying to production environment in case of any security issues.", - "title": "Observer RESTful API documentation", + "description": "This is APIv1 documentation for AnyShake Observer, please set `server_settings.debug` to `false` in `config.json` when deploying to production environment in case of any security issues.", + "title": "AnyShake Observer APIv1", "contact": {} }, "basePath": "/api/v1", @@ -21,15 +21,15 @@ "parameters": [ { "type": "integer", - "description": "Start timestamp of the waveform data to be queried, in milliseconds", - "name": "start", + "description": "Start timestamp of the waveform data to be queried, in milliseconds (unix timestamp)", + "name": "start_time", "in": "formData", "required": true }, { "type": "integer", - "description": "End timestamp of the waveform data to be queried, in milliseconds", - "name": "end", + "description": "End timestamp of the waveform data to be queried, in milliseconds (unix timestamp)", + "name": "end_time", "in": "formData", "required": true }, @@ -42,7 +42,7 @@ }, { "type": "string", - "description": "Channel of the waveform, `EHZ`, `EHE` or `EHN`, reuqired when format is `sac`", + "description": "Channel of the waveform, `Z`, `E` or `N`, reuqired when format is `sac`", "name": "channel", "in": "formData" } @@ -61,7 +61,7 @@ "data": { "type": "array", "items": { - "$ref": "#/definitions/publisher.Geophone" + "$ref": "#/definitions/explorer.ExplorerData" } } } @@ -168,7 +168,7 @@ "data": { "type": 
"array", "items": { - "$ref": "#/definitions/mseed.MiniSEEDFile" + "$ref": "#/definitions/mseed.miniSeedFileInfo" } } } @@ -206,7 +206,7 @@ "summary": "AnyShake Observer station status", "responses": { "200": { - "description": "Successfully read station status", + "description": "Successfully read station information", "schema": { "allOf": [ { @@ -216,7 +216,7 @@ "type": "object", "properties": { "data": { - "$ref": "#/definitions/station.System" + "$ref": "#/definitions/station.stationInfo" } } } @@ -259,7 +259,7 @@ "data": { "type": "array", "items": { - "$ref": "#/definitions/trace.Event" + "$ref": "#/definitions/trace.seismicEvent" } } } @@ -284,60 +284,109 @@ } }, "definitions": { - "mseed.MiniSEEDFile": { + "config.Sensor": { "type": "object", "properties": { + "frequency": { + "type": "number" + }, + "fullscale": { + "type": "number" + }, + "resolution": { + "type": "integer" + }, + "sensitivity": { + "type": "number" + }, + "velocity": { + "type": "boolean" + }, + "vref": { + "type": "number" + } + } + }, + "config.Station": { + "type": "object", + "properties": { + "city": { + "type": "string" + }, + "country": { + "type": "string" + }, "name": { "type": "string" }, - "size": { + "owner": { "type": "string" }, - "time": { - "type": "integer" + "region": { + "type": "string" + } + } + }, + "config.Stream": { + "type": "object", + "properties": { + "channel": { + "type": "string" }, - "ttl": { - "type": "integer" + "location": { + "type": "string" + }, + "network": { + "type": "string" + }, + "station": { + "type": "string" } } }, - "publisher.Geophone": { + "explorer.ExplorerData": { "type": "object", "properties": { - "ehe": { + "e_axis": { "type": "array", "items": { "type": "integer" } }, - "ehn": { + "n_axis": { "type": "array", "items": { "type": "integer" } }, - "ehz": { + "sample_rate": { + "type": "integer" + }, + "timestamp": { + "type": "integer" + }, + "z_axis": { "type": "array", "items": { "type": "integer" } - }, - "ts": { - "type": "integer" 
} } }, - "publisher.System": { + "mseed.miniSeedFileInfo": { "type": "object", "properties": { - "errors": { - "type": "integer" + "name": { + "type": "string" }, - "messages": { + "size": { + "type": "string" + }, + "time": { "type": "integer" }, - "offset": { - "type": "number" + "ttl": { + "type": "integer" } } }, @@ -362,56 +411,7 @@ } } }, - "station.System": { - "type": "object", - "properties": { - "adc": { - "$ref": "#/definitions/station.adcModel" - }, - "cpu": { - "$ref": "#/definitions/station.cpuModel" - }, - "disk": { - "$ref": "#/definitions/station.diskModel" - }, - "geophone": { - "$ref": "#/definitions/station.geophoneModel" - }, - "memory": { - "$ref": "#/definitions/station.memoryModel" - }, - "os": { - "$ref": "#/definitions/station.osModel" - }, - "position": { - "$ref": "#/definitions/station.positionModel" - }, - "station": { - "$ref": "#/definitions/station.stationModel" - }, - "status": { - "$ref": "#/definitions/publisher.System" - }, - "timestamp": { - "type": "integer" - }, - "uptime": { - "type": "integer" - } - } - }, - "station.adcModel": { - "type": "object", - "properties": { - "fullscale": { - "type": "number" - }, - "resolution": { - "type": "integer" - } - } - }, - "station.cpuModel": { + "station.cpuInfo": { "type": "object", "properties": { "model": { @@ -422,7 +422,7 @@ } } }, - "station.diskModel": { + "station.diskInfo": { "type": "object", "properties": { "free": { @@ -439,18 +439,36 @@ } } }, - "station.geophoneModel": { + "station.explorerInfo": { "type": "object", "properties": { - "frequency": { + "device_id": { + "type": "integer" + }, + "elapsed": { + "type": "integer" + }, + "elevation": { "type": "number" }, - "sensitivity": { + "errors": { + "type": "integer" + }, + "latitude": { "type": "number" + }, + "longitude": { + "type": "number" + }, + "received": { + "type": "integer" + }, + "sample_rate": { + "type": "integer" } } }, - "station.memoryModel": { + "station.memoryInfo": { "type": "object", "properties": { 
"free": { @@ -467,7 +485,7 @@ } } }, - "station.osModel": { + "station.osInfo": { "type": "object", "properties": { "arch": { @@ -481,44 +499,45 @@ }, "os": { "type": "string" - } - } - }, - "station.positionModel": { - "type": "object", - "properties": { - "elevation": { - "type": "number" }, - "latitude": { - "type": "number" + "timestamp": { + "type": "integer" }, - "longitude": { - "type": "number" + "uptime": { + "type": "integer" } } }, - "station.stationModel": { + "station.stationInfo": { "type": "object", "properties": { - "location": { - "type": "string" + "cpu": { + "$ref": "#/definitions/station.cpuInfo" }, - "name": { - "type": "string" + "disk": { + "$ref": "#/definitions/station.diskInfo" }, - "network": { - "type": "string" + "explorer": { + "$ref": "#/definitions/station.explorerInfo" + }, + "memory": { + "$ref": "#/definitions/station.memoryInfo" + }, + "os": { + "$ref": "#/definitions/station.osInfo" + }, + "sensor": { + "$ref": "#/definitions/config.Sensor" }, "station": { - "type": "string" + "$ref": "#/definitions/config.Station" }, - "uuid": { - "type": "string" + "stream": { + "$ref": "#/definitions/config.Stream" } } }, - "trace.Event": { + "trace.seismicEvent": { "type": "object", "properties": { "depth": { @@ -528,7 +547,7 @@ "type": "number" }, "estimation": { - "$ref": "#/definitions/trace.estimation" + "$ref": "#/definitions/trace.seismicEventEstimation" }, "event": { "type": "string" @@ -553,7 +572,7 @@ } } }, - "trace.estimation": { + "trace.seismicEventEstimation": { "type": "object", "properties": { "p": { diff --git a/docs/swagger.yaml b/docs/swagger.yaml index af2dde9f6..7c4e09021 100644 --- a/docs/swagger.yaml +++ b/docs/swagger.yaml @@ -1,41 +1,73 @@ basePath: /api/v1 definitions: - mseed.MiniSEEDFile: + config.Sensor: properties: + frequency: + type: number + fullscale: + type: number + resolution: + type: integer + sensitivity: + type: number + velocity: + type: boolean + vref: + type: number + type: object + config.Station: 
+ properties: + city: + type: string + country: + type: string name: type: string - size: + owner: + type: string + region: + type: string + type: object + config.Stream: + properties: + channel: + type: string + location: + type: string + network: + type: string + station: type: string - time: - type: integer - ttl: - type: integer type: object - publisher.Geophone: + explorer.ExplorerData: properties: - ehe: + e_axis: items: type: integer type: array - ehn: + n_axis: items: type: integer type: array - ehz: + sample_rate: + type: integer + timestamp: + type: integer + z_axis: items: type: integer type: array - ts: - type: integer type: object - publisher.System: + mseed.miniSeedFileInfo: properties: - errors: + name: + type: string + size: + type: string + time: type: integer - messages: + ttl: type: integer - offset: - type: number type: object response.HttpResponse: properties: @@ -51,46 +83,14 @@ definitions: time: type: string type: object - station.System: - properties: - adc: - $ref: '#/definitions/station.adcModel' - cpu: - $ref: '#/definitions/station.cpuModel' - disk: - $ref: '#/definitions/station.diskModel' - geophone: - $ref: '#/definitions/station.geophoneModel' - memory: - $ref: '#/definitions/station.memoryModel' - os: - $ref: '#/definitions/station.osModel' - position: - $ref: '#/definitions/station.positionModel' - station: - $ref: '#/definitions/station.stationModel' - status: - $ref: '#/definitions/publisher.System' - timestamp: - type: integer - uptime: - type: integer - type: object - station.adcModel: - properties: - fullscale: - type: number - resolution: - type: integer - type: object - station.cpuModel: + station.cpuInfo: properties: model: type: string percent: type: number type: object - station.diskModel: + station.diskInfo: properties: free: type: integer @@ -101,14 +101,26 @@ definitions: used: type: integer type: object - station.geophoneModel: + station.explorerInfo: properties: - frequency: + device_id: + type: integer + elapsed: + 
type: integer + elevation: type: number - sensitivity: + errors: + type: integer + latitude: + type: number + longitude: type: number + received: + type: integer + sample_rate: + type: integer type: object - station.memoryModel: + station.memoryInfo: properties: free: type: integer @@ -119,7 +131,7 @@ definitions: used: type: integer type: object - station.osModel: + station.osInfo: properties: arch: type: string @@ -129,37 +141,38 @@ definitions: type: string os: type: string + timestamp: + type: integer + uptime: + type: integer type: object - station.positionModel: - properties: - elevation: - type: number - latitude: - type: number - longitude: - type: number - type: object - station.stationModel: + station.stationInfo: properties: - location: - type: string - name: - type: string - network: - type: string + cpu: + $ref: '#/definitions/station.cpuInfo' + disk: + $ref: '#/definitions/station.diskInfo' + explorer: + $ref: '#/definitions/station.explorerInfo' + memory: + $ref: '#/definitions/station.memoryInfo' + os: + $ref: '#/definitions/station.osInfo' + sensor: + $ref: '#/definitions/config.Sensor' station: - type: string - uuid: - type: string + $ref: '#/definitions/config.Station' + stream: + $ref: '#/definitions/config.Stream' type: object - trace.Event: + trace.seismicEvent: properties: depth: type: number distance: type: number estimation: - $ref: '#/definitions/trace.estimation' + $ref: '#/definitions/trace.seismicEventEstimation' event: type: string latitude: @@ -175,7 +188,7 @@ definitions: verfied: type: boolean type: object - trace.estimation: + trace.seismicEventEstimation: properties: p: type: number @@ -184,10 +197,10 @@ definitions: type: object info: contact: {} - description: This is Observer RESTful API documentation, please set `server_settings.debug` + description: This is APIv1 documentation for AnyShake Observer, please set `server_settings.debug` to `false` in `config.json` when deploying to production environment in case of any security 
issues. - title: Observer RESTful API documentation + title: AnyShake Observer APIv1 paths: /history: post: @@ -198,13 +211,15 @@ paths: and 1 hour for SAC parameters: - description: Start timestamp of the waveform data to be queried, in milliseconds + (unix timestamp) in: formData - name: start + name: start_time required: true type: integer - description: End timestamp of the waveform data to be queried, in milliseconds + (unix timestamp) in: formData - name: end + name: end_time required: true type: integer - description: Format of the waveform data to be queried, `json` or `sac` @@ -212,8 +227,8 @@ paths: name: format required: true type: string - - description: Channel of the waveform, `EHZ`, `EHE` or `EHN`, reuqired when - format is `sac` + - description: Channel of the waveform, `Z`, `E` or `N`, reuqired when format + is `sac` in: formData name: channel type: string @@ -229,7 +244,7 @@ paths: - properties: data: items: - $ref: '#/definitions/publisher.Geophone' + $ref: '#/definitions/explorer.ExplorerData' type: array type: object "400": @@ -295,7 +310,7 @@ paths: - properties: data: items: - $ref: '#/definitions/mseed.MiniSEEDFile' + $ref: '#/definitions/mseed.miniSeedFileInfo' type: array type: object "400": @@ -323,13 +338,13 @@ paths: - application/json responses: "200": - description: Successfully read station status + description: Successfully read station information schema: allOf: - $ref: '#/definitions/response.HttpResponse' - properties: data: - $ref: '#/definitions/station.System' + $ref: '#/definitions/station.stationInfo' type: object summary: AnyShake Observer station status /trace: @@ -356,7 +371,7 @@ paths: - properties: data: items: - $ref: '#/definitions/trace.Event' + $ref: '#/definitions/trace.seismicEvent' type: array type: object "400": diff --git a/driver/dao/close.go b/driver/dao/close.go deleted file mode 100644 index 8eb34351c..000000000 --- a/driver/dao/close.go +++ /dev/null @@ -1,13 +0,0 @@ -package dao - -import "gorm.io/gorm" - 
-func Close(db *gorm.DB) error { - if db == nil { - return nil - } - - sqlDB, err := db.DB() - sqlDB.Close() - return err -} diff --git a/driver/dao/delete.go b/driver/dao/delete.go deleted file mode 100644 index 357010e33..000000000 --- a/driver/dao/delete.go +++ /dev/null @@ -1,8 +0,0 @@ -package dao - -import "gorm.io/gorm" - -func Delete(db *gorm.DB, start, end int64) error { - var records []dbRecord - return db.Table(DB_TABLENAME).Where("ts >= ? AND ts <= ?", start, end).Delete(&records).Error -} diff --git a/driver/dao/engine.go b/driver/dao/engine.go deleted file mode 100644 index a8e9a3660..000000000 --- a/driver/dao/engine.go +++ /dev/null @@ -1,72 +0,0 @@ -package dao - -import ( - "fmt" - - "gorm.io/driver/mysql" - "gorm.io/driver/postgres" - "gorm.io/driver/sqlserver" - "gorm.io/gorm" - "gorm.io/gorm/logger" -) - -type PostgreSQL struct{} - -func (p *PostgreSQL) isCompatible(engine string) bool { - return engine == "postgres" || engine == "postgresql" -} - -func (p *PostgreSQL) openDBConn(host string, port int, username, password, database string) (*gorm.DB, error) { - dsn := fmt.Sprintf( - "host=%s port=%d user=%s password=%s dbname=%s sslmode=disable connect_timeout=%d TimeZone=Etc/GMT", - host, port, username, password, database, int(DB_TIMEOUT.Seconds()), - ) - db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{ - Logger: logger.Default.LogMode(logger.Silent), - SkipDefaultTransaction: true, // Disable transaction to improve performance - }) - if err != nil { - return nil, err - } - sqlDB, err := db.DB() - if err != nil { - return nil, err - } - sqlDB.SetConnMaxLifetime(DB_TIMEOUT) - return db, nil -} - -type MariaDB struct{} - -func (m *MariaDB) isCompatible(engine string) bool { - return engine == "mysql" || engine == "mariadb" -} - -func (m *MariaDB) openDBConn(host string, port int, username, password, database string) (*gorm.DB, error) { - dsn := fmt.Sprintf( - "%s:%s@tcp(%s:%d)/%s?charset=utf8mb4&parseTime=True&timeout=%ds&loc=UTC", - 
username, password, host, port, database, - int(DB_TIMEOUT.Seconds()), - ) - return gorm.Open(mysql.Open(dsn), &gorm.Config{ - Logger: logger.Default.LogMode(logger.Silent), - SkipDefaultTransaction: true, // Disable transaction to improve performance - }) -} - -type SQLServer struct{} - -func (s *SQLServer) isCompatible(engine string) bool { - return engine == "sqlserver" || engine == "mssql" -} - -func (s *SQLServer) openDBConn(host string, port int, username, password, database string) (*gorm.DB, error) { - dsn := fmt.Sprintf( - "sqlserver://%s:%s@%s:%d?database=%s", - username, password, host, port, database, - ) - return gorm.Open(sqlserver.Open(dsn), &gorm.Config{ - Logger: logger.Default.LogMode(logger.Silent), - SkipDefaultTransaction: true, // Disable transaction to improve performance - }) -} diff --git a/driver/dao/insert.go b/driver/dao/insert.go deleted file mode 100644 index d698850b0..000000000 --- a/driver/dao/insert.go +++ /dev/null @@ -1,10 +0,0 @@ -package dao - -import ( - "github.com/anyshake/observer/publisher" - "gorm.io/gorm" -) - -func Insert(db *gorm.DB, gp *publisher.Geophone) error { - return db.Table(DB_TABLENAME).Create(&dbRecord{Geophone: *gp}).Error -} diff --git a/driver/dao/migrate.go b/driver/dao/migrate.go deleted file mode 100644 index 32d57a0aa..000000000 --- a/driver/dao/migrate.go +++ /dev/null @@ -1,7 +0,0 @@ -package dao - -import "gorm.io/gorm" - -func Migrate(db *gorm.DB) error { - return db.Table(DB_TABLENAME).AutoMigrate(&dbRecord{}) -} diff --git a/driver/dao/open.go b/driver/dao/open.go deleted file mode 100644 index 4f4231bc6..000000000 --- a/driver/dao/open.go +++ /dev/null @@ -1,23 +0,0 @@ -package dao - -import ( - "fmt" - - "gorm.io/gorm" -) - -func Open(host string, port int, engine, username, password, database string) (*gorm.DB, error) { - engines := []dbEngine{ - &PostgreSQL{}, - &MariaDB{}, - &SQLServer{}, - } - for _, e := range engines { - if e.isCompatible(engine) { - return e.openDBConn(host, port, 
username, password, database) - } - } - - err := fmt.Errorf("database engine %s is unsupported", engine) - return nil, err -} diff --git a/driver/dao/query.go b/driver/dao/query.go deleted file mode 100644 index 1aca352f1..000000000 --- a/driver/dao/query.go +++ /dev/null @@ -1,20 +0,0 @@ -package dao - -import ( - "github.com/anyshake/observer/publisher" - "gorm.io/gorm" -) - -func Query(db *gorm.DB, start, end int64) ([]publisher.Geophone, error) { - var records []dbRecord - err := db.Table(DB_TABLENAME).Select("ts, ehz, ehe, ehn").Where("ts >= ? AND ts <= ?", start, end).Scan(&records).Error - - var result []publisher.Geophone - for _, v := range records { - result = append(result, publisher.Geophone{ - TS: v.TS, EHZ: v.EHZ, EHE: v.EHE, EHN: v.EHN, - }) - } - - return result, err -} diff --git a/driver/dao/types.go b/driver/dao/types.go deleted file mode 100644 index d8224f47a..000000000 --- a/driver/dao/types.go +++ /dev/null @@ -1,23 +0,0 @@ -package dao - -import ( - "time" - - "github.com/anyshake/observer/publisher" - "gorm.io/gorm" -) - -const ( - DB_TIMEOUT = 5 * time.Second - DB_TABLENAME = "geophone_records" -) - -type dbRecord struct { - ID uint `gorm:"primarykey"` - publisher.Geophone -} - -type dbEngine interface { - isCompatible(engine string) bool - openDBConn(host string, port int, username, password, database string) (*gorm.DB, error) -} diff --git a/driver/seedlink/capabilities.go b/driver/seedlink/capabilities.go deleted file mode 100644 index bca5f1f8a..000000000 --- a/driver/seedlink/capabilities.go +++ /dev/null @@ -1,20 +0,0 @@ -package seedlink - -import ( - "net" - - "github.com/anyshake/observer/feature" -) - -type CAPABILITIES struct{} - -// Callback of "CAPABILITIES" command, implements SeedLinkCommandCallback interface -func (*CAPABILITIES) Callback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - _, err := conn.Write([]byte(RES_OK)) - return 
err -} - -// Fallback of "CAPABILITIES" command, implements SeedLinkCommandCallback interface -func (*CAPABILITIES) Fallback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - conn.Close() -} diff --git a/driver/seedlink/data.go b/driver/seedlink/data.go deleted file mode 100644 index d0166a542..000000000 --- a/driver/seedlink/data.go +++ /dev/null @@ -1,31 +0,0 @@ -package seedlink - -import ( - "net" - "strconv" - - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/duration" -) - -type DATA struct{} - -// Callback of "DATA" command, implements SeedLinkCommandCallback interface -func (*DATA) Callback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - cl.StartTime, _ = duration.Timestamp(options.Status.System.Offset) - if len(args) > 0 { - seq, err := strconv.ParseInt(args[0], 16, 64) - if err != nil { - conn.Write([]byte(RES_ERR)) - return err - } - cl.Sequence = seq + 1 - } - _, err := conn.Write([]byte(RES_OK)) - return err -} - -// Fallback of "DATA" command, implements SeedLinkCommandCallback interface -func (*DATA) Fallback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - conn.Close() -} diff --git a/driver/seedlink/end.go b/driver/seedlink/end.go deleted file mode 100644 index fc1bfc98a..000000000 --- a/driver/seedlink/end.go +++ /dev/null @@ -1,80 +0,0 @@ -package seedlink - -import ( - "net" - "time" - - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/publisher" - "github.com/ostafen/clover/v2/query" -) - -type END struct{} - -// Callback of "END" command, implements SeedLinkCommandCallback interface -func (*END) Callback(sl *SeedLinkGlobal, client *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - if client.StartTime.IsZero() { - _, err 
:= conn.Write([]byte(RES_ERR)) - return err - } - - // Query from buffer database - records, err := sl.SeedLinkBuffer.Database.FindAll(query.NewQuery(sl.SeedLinkBuffer.Collection). - Where(query.Field("ts").Gt(client.StartTime.UTC().UnixMilli()). - And(query.Field("ts").Lt(client.EndTime.UTC().UnixMilli())), - )) - if err != nil { - conn.Write([]byte(RES_ERR)) - return err - } - - // Enter stream mode - client.Streaming = true - - for _, record := range records { - var recordMap map[string]any - record.Unmarshal(&recordMap) - channelMap := map[string]string{ - "EHZ": recordMap["ehz"].(string), - "EHE": recordMap["ehe"].(string), - "EHN": recordMap["ehn"].(string), - } - for _, channel := range client.Channels { - data, ok := channelMap[channel] - if !ok { - continue - } - var ( - timestamp = int64(recordMap["ts"].(float64)) - bufTime = time.UnixMilli(timestamp).UTC() - ) - if bufTime.After(client.StartTime.UTC()) && bufTime.Before(client.EndTime.UTC()) { - countDataArr, err := publisher.DecodeInt32Array(data) - if err != nil { - return err - } - err = SendSLPacket(conn, client, SeedLinkPacket{ - Channel: channel, Timestamp: timestamp, Count: countDataArr, - }) - if err != nil { - return err - } - } - } - } - - // Subscribe to the publisher - go publisher.Subscribe( - &options.Status.Geophone, &client.Streaming, - func(gp *publisher.Geophone) error { - return streamer(conn, client, gp) - }, - ) - - return nil -} - -// Fallback of "END" command, implements SeedLinkCommandCallback interface -func (*END) Fallback(sl *SeedLinkGlobal, client *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - conn.Close() -} diff --git a/driver/seedlink/hello.go b/driver/seedlink/hello.go deleted file mode 100644 index 828e4e0ba..000000000 --- a/driver/seedlink/hello.go +++ /dev/null @@ -1,22 +0,0 @@ -package seedlink - -import ( - "fmt" - "net" - - "github.com/anyshake/observer/feature" -) - -type HELLO struct{} - -// Callback of "HELLO" command, 
implements SeedLinkCommandCallback interface -func (*HELLO) Callback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - station := sl.Organization - _, err := conn.Write([]byte(fmt.Sprintf("%s\r\n%s\r\n", RELEASE, station))) - return err -} - -// Fallback of "HELLO" command, implements SeedLinkCommandCallback interface -func (*HELLO) Fallback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - conn.Close() -} diff --git a/driver/seedlink/info.go b/driver/seedlink/info.go deleted file mode 100644 index cceb8bc27..000000000 --- a/driver/seedlink/info.go +++ /dev/null @@ -1,223 +0,0 @@ -package seedlink - -import ( - "fmt" - "net" - "time" - - "github.com/anyshake/observer/feature" - "github.com/bclswl0827/mseedio" - "github.com/clbanning/anyxml" -) - -type INFO struct{} - -// Callback of "INFO <...>" command, implements SeedLinkCommandCallback interface -func (i *INFO) Callback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - err := fmt.Errorf("arg error") - if len(args) < 1 { - return err - } - - var ( - action = args[0] - dataBytes []byte - ) - switch action { - case "ID": - state := sl.SeedLinkState - dataBytes, err = i.getID(state, FLAG_INF) - case "STATIONS": - var ( - state = sl.SeedLinkState - stations = sl.Stations - ) - dataBytes, err = i.getStations(state, stations) - case "CAPABILITIES", "CONNECTIONS": - var ( - state = sl.SeedLinkState - capabilities = sl.Capabilities - ) - dataBytes, err = i.getCapabilities(state, capabilities) - case "STREAMS": - var ( - streams = sl.Streams - stations = sl.Stations - ) - dataBytes, err = i.getStreams(sl.SeedLinkState, streams, stations) - default: - state := sl.SeedLinkState - dataBytes, err = i.getID(state, FLAG_ERR) - } - if err != nil { - return err - } - - _, err = 
conn.Write(dataBytes) - return err -} - -// Fallback of "INFO <...>" command, implements SeedLinkCommandCallback interface -func (i *INFO) Fallback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - conn.Write([]byte(RES_ERR)) -} - -// getID returns response of "INFO ID" command -func (i *INFO) getID(state SeedLinkState, flag int) ([]byte, error) { - result := map[string]string{ - "-software": state.Software, - "-started": state.StartTime, - "-organization": state.Organization, - } - xmlData, err := anyxml.Xml(result, "seedlink") - if err != nil { - return []byte(RES_ERR), err - } - // Set XML header and return response - xmlBody := i.setXMLHeader(xmlData) - currentTime := time.Now().UTC() - return i.setResponse(xmlBody, flag, currentTime) -} - -// getStations returns response of "INFO STATIONS" command -func (i *INFO) getStations(state SeedLinkState, staions []SeedLinkStation) ([]byte, error) { - result := map[string]any{ - "-software": state.Software, - "-started": state.StartTime, - "-organization": state.Organization, - "station": staions, - } - xmlData, err := anyxml.Xml(result, "seedlink") - if err != nil { - return []byte(RES_ERR), err - } - // Set XML header and return response - xmlBody := i.setXMLHeader(xmlData) - currentTime := time.Now().UTC() - return i.setResponse(xmlBody, FLAG_INF, currentTime) -} - -// getCapabilities returns response of "INFO CAPABILITIES" command -func (i *INFO) getCapabilities(state SeedLinkState, capabilities []SeedLinkCapability) ([]byte, error) { - result := map[string]any{ - "-software": state.Software, - "-started": state.StartTime, - "-organization": state.Organization, - "capability": capabilities, - } - xmlData, err := anyxml.Xml(result, "seedlink") - if err != nil { - return []byte(RES_ERR), err - } - // Set XML header and return response - xmlBody := i.setXMLHeader(xmlData) - currentTime := time.Now().UTC() - return i.setResponse(xmlBody, FLAG_INF, currentTime) -} 
- -// getStreams returns response of "INFO STREAMS" command -func (i *INFO) getStreams(state SeedLinkState, streams []SeedLinkStream, stations []SeedLinkStation) ([]byte, error) { - type respModel struct { - SeedLinkStation - Streams []SeedLinkStream `xml:"stream"` - StreamCheck string `xml:"stream_check,attr"` - } - result := map[any]any{ - "-software": state.Software, - "-started": state.StartTime, - "-organization": state.Organization, - } - var resp []respModel - for _, v := range stations { - // Match stream by station name - var availableStreams []SeedLinkStream - for _, s := range streams { - if s.Station == v.Station { - availableStreams = append(availableStreams, s) - } - } - resp = append(resp, respModel{ - SeedLinkStation: v, - Streams: availableStreams, - StreamCheck: "enabled", - }) - } - result["station"] = resp - xmlData, err := anyxml.Xml(result, "seedlink") - if err != nil { - return []byte(RES_ERR), err - } - // Set XML header and return response - xmlBody := i.setXMLHeader(xmlData) - currentTime := time.Now().UTC() - return i.setResponse(xmlBody, FLAG_INF, currentTime) -} - -// setXMLHeader sets XML header to body and return string -func (i *INFO) setXMLHeader(body []byte) []byte { - header := []byte(``) - return append(header, body...) 
-} - -// setResponse assembles response in MiniSeed format -func (i *INFO) setResponse(body []byte, errFlag int, startTime time.Time) ([]byte, error) { - // Convert body to int32 array - bodyBuffer := []int32{} - for _, v := range body { - bodyBuffer = append(bodyBuffer, int32(v)) - } - // Set channel code by error flag - channelCode := "INF" - if errFlag == FLAG_ERR { - channelCode = "ERR" - } - // Initialize MiniSeed data - var miniseed mseedio.MiniSeedData - miniseed.Init(mseedio.ASCII, mseedio.MSBFIRST) - // Split data into 512 bytes each - bodyLength := len(bodyBuffer) - dataLength := (512 - mseedio.FIXED_SECTION_LENGTH - mseedio.BLOCKETTE100X_SECTION_LENGTH) - fullLength := bodyLength + mseedio.FIXED_SECTION_LENGTH + mseedio.BLOCKETTE100X_SECTION_LENGTH - blockCount := fullLength / 512 - // "SLINFO*" or "SLINFO" is signature - // * indicates non-final block, indicates final block - blockHeader := []byte{'S', 'L', 'I', 'N', 'F', 'O', ' ', '*'} - // Append each block to MiniSeed data - var resultBuffer []byte - for i := 0; i <= blockCount; i++ { - startIndex := i * dataLength - endIndex := (i + 1) * dataLength - if i == blockCount { - // Set final block flag - blockHeader[7] = ' ' - endIndex = bodyLength - } - err := miniseed.Append( - bodyBuffer[startIndex:endIndex], - &mseedio.AppendOptions{ - SequenceNumber: fmt.Sprintf("%06d", i+1), - ChannelCode: channelCode, - StartTime: startTime, - StationCode: "INFO ", - LocationCode: " ", - NetworkCode: "SL", - SampleRate: 0, - }, - ) - if err != nil { - return nil, err - } - // Encode MiniSeed data - res, err := miniseed.Encode(mseedio.APPEND, mseedio.MSBFIRST) - if err != nil { - return nil, err - } - // Each block should be 512 bytes - if len(res) < 512 { - // Fill with 0x00 if length is less than 512 - res = append(res, make([]byte, 512-len(res))...) - } - resultBuffer = append(resultBuffer, append(blockHeader, res...)...) 
- } - return resultBuffer, nil -} diff --git a/driver/seedlink/packet.go b/driver/seedlink/packet.go deleted file mode 100644 index e35f71bfb..000000000 --- a/driver/seedlink/packet.go +++ /dev/null @@ -1,68 +0,0 @@ -package seedlink - -import ( - "fmt" - "net" - "time" - - "github.com/bclswl0827/mseedio" -) - -func SendSLPacket(conn net.Conn, client *SeedLinkClient, data SeedLinkPacket) error { - // Create data chunks to adapt to SeedLink packet size - var countGroup [][]int32 - if len(data.Count) > CHUNK_SIZE { - for i := 0; i < len(data.Count); i += CHUNK_SIZE { - if i+CHUNK_SIZE > len(data.Count) { - countGroup = append(countGroup, data.Count[i:]) - } else { - countGroup = append(countGroup, data.Count[i:i+CHUNK_SIZE]) - } - } - } else { - countGroup = append(countGroup, data.Count) - } - - dataSpanMs := 1000 / float64(len(data.Count)) - for i, c := range countGroup { - // Generate MiniSEED record - var miniseed mseedio.MiniSeedData - miniseed.Init(mseedio.STEIM2, mseedio.MSBFIRST) - err := miniseed.Append(c, &mseedio.AppendOptions{ - ChannelCode: data.Channel, - StationCode: client.Station, - LocationCode: client.Location, - NetworkCode: client.Network, - SampleRate: float64(len(data.Count)), - SequenceNumber: fmt.Sprintf("%06d", client.Sequence), - StartTime: time.UnixMilli(data.Timestamp + int64(float64(i*CHUNK_SIZE)*dataSpanMs)).UTC(), - }) - if err != nil { - return err - } - - // Get MiniSEED data bytes always in 512 bytes - miniseed.Series[0].BlocketteSection.RecordLength = 9 - slData, err := miniseed.Encode(mseedio.OVERWRITE, mseedio.MSBFIRST) - if err != nil { - return err - } - - // Prepend and send SeedLink sequence number - slSeq := []byte(fmt.Sprintf("SL%06X", client.Sequence)) - _, err = conn.Write(slSeq) - if err != nil { - return err - } - - // Send SeedLink packet data - _, err = conn.Write(slData) - if err != nil { - return err - } - - client.Sequence++ - } - - return nil -} diff --git a/driver/seedlink/select.go b/driver/seedlink/select.go 
deleted file mode 100644 index bfd4c891b..000000000 --- a/driver/seedlink/select.go +++ /dev/null @@ -1,32 +0,0 @@ -package seedlink - -import ( - "net" - - "github.com/anyshake/observer/feature" -) - -type SELECT struct{} - -// Callback of "SELECT <...>" command, implements SeedLinkCommandCallback interface -func (*SELECT) Callback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - if len(args) < 1 { - _, err := conn.Write([]byte(RES_ERR)) - return err - } else { - if len(args[0]) < 5 { - _, err := conn.Write([]byte(RES_ERR)) - return err - } else { - cl.Location = args[0][:2] - cl.Channels = append(cl.Channels, args[0][2:5]) - } - } - _, err := conn.Write([]byte(RES_OK)) - return err -} - -// Fallback of "SELECT <...>" command, implements SeedLinkCommandCallback interface -func (*SELECT) Fallback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - conn.Close() -} diff --git a/driver/seedlink/station.go b/driver/seedlink/station.go deleted file mode 100644 index 5a9e7ef22..000000000 --- a/driver/seedlink/station.go +++ /dev/null @@ -1,23 +0,0 @@ -package seedlink - -import ( - "net" - - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/text" -) - -type STATION struct{} - -// Callback of "STATION <...> <...>" command, implements SeedLinkCommandCallback interface -func (*STATION) Callback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - cl.Station = text.TruncateString(args[0], 5) - cl.Network = text.TruncateString(args[1], 2) - _, err := conn.Write([]byte(RES_OK)) - return err -} - -// Fallback of "STATION <...> <...>" command, implements SeedLinkCommandCallback interface -func (*STATION) Fallback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - 
conn.Close() -} diff --git a/driver/seedlink/time.go b/driver/seedlink/time.go deleted file mode 100644 index 837d39314..000000000 --- a/driver/seedlink/time.go +++ /dev/null @@ -1,89 +0,0 @@ -package seedlink - -import ( - "net" - "strconv" - "strings" - "time" - - "github.com/anyshake/observer/feature" -) - -type TIME struct{} - -// Callback of "TIME <...>" command, implements SeedLinkCommandCallback interface -func (t *TIME) Callback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, streamer SeedLinkStreamer, conn net.Conn, args ...string) error { - resCode := RES_OK - switch len(args) { - case 2: - endTime, err := t.getTimeFromArg(args[1]) - if err != nil { - resCode = RES_ERR - } else { - cl.EndTime = endTime - } - fallthrough - case 1: - startTime, err := t.getTimeFromArg(args[0]) - if err != nil { - resCode = RES_ERR - } else { - cl.StartTime = startTime - } - default: - resCode = RES_ERR - } - - _, err := conn.Write([]byte(resCode)) - return err -} - -// Fallback of "TIME <...>" command, implements SeedLinkCommandCallback interface -func (*TIME) Fallback(sl *SeedLinkGlobal, cl *SeedLinkClient, options *feature.FeatureOptions, conn net.Conn, args ...string) { - conn.Close() -} - -func (*TIME) getTimeFromArg(timeStr string) (time.Time, error) { - if len(timeStr) != 19 { - return time.Time{}, nil - } - splitTimeStr := strings.Split(timeStr, ",") - if len(splitTimeStr) != 6 { - return time.Time{}, nil - } - - // Format: YYYY,MM,DD,hh,mm,ss - // Example: 2024,01,16,07,15,16 - year, err := strconv.Atoi(splitTimeStr[0]) - if err != nil { - return time.Time{}, err - } - - monthInt, err := strconv.Atoi(splitTimeStr[1]) - if err != nil { - return time.Time{}, err - } - - month := time.Month(monthInt) - day, err := strconv.Atoi(splitTimeStr[2]) - if err != nil { - return time.Time{}, err - } - - hour, err := strconv.Atoi(splitTimeStr[3]) - if err != nil { - return time.Time{}, err - } - - minute, err := strconv.Atoi(splitTimeStr[4]) - if err != 
nil { - return time.Time{}, err - } - - second, err := strconv.Atoi(splitTimeStr[5]) - if err != nil { - return time.Time{}, err - } - - return time.Date(year, month, day, hour, minute, second, 0, time.UTC), nil -} diff --git a/driver/seedlink/types.go b/driver/seedlink/types.go deleted file mode 100644 index 8a29b7cc2..000000000 --- a/driver/seedlink/types.go +++ /dev/null @@ -1,113 +0,0 @@ -package seedlink - -import ( - "encoding/xml" - "net" - "time" - - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/publisher" - c "github.com/ostafen/clover/v2" -) - -const ( - CHUNK_SIZE int = 100 - ORGANIZATION string = "anyshake.org" - RELEASE string = "SeedLink v3.1 AnyShake Edition (Very basic implementation in Go) :: SLPROTO:3.1 CAP EXTREPLY NSWILDCARD BATCH WS:13 :: Constructing Realtime Seismic Network Ambitiously." -) - -// SeedLink error flags -const ( - FLAG_INF = iota - FLAG_ERR -) - -// SeedLink response codes -const ( - RES_OK = "OK\r\n" - RES_ERR = "ERROR\r\n" -) - -// SeedLink main daemon config & state -type SeedLinkGlobal struct { - SeedLinkState - SeedLinkBuffer - Streams []SeedLinkStream - Stations []SeedLinkStation - Capabilities []SeedLinkCapability -} - -// SeedLink data buffer -type SeedLinkBuffer struct { - Collection string - Duration time.Duration - Database *c.DB -} - -// SeedLink basic state -type SeedLinkState struct { - Software string - StartTime string - Organization string -} - -// Station field model of INFO STATIONS command -type SeedLinkStation struct { - XMLName xml.Name `xml:"station"` - BeginSequence string `xml:"begin_seq,attr"` - EndSequence string `xml:"end_seq,attr"` - Station string `xml:"name,attr"` - Network string `xml:"network,attr"` - Description string `xml:"description,attr"` -} - -// Stream field model of INFO STREAMS command -type SeedLinkStream struct { - XMLName xml.Name `xml:"stream"` - BeginTime string `xml:"begin_time,attr"` - EndTime string `xml:"end_time,attr"` - SeedName string 
`xml:"seedname,attr"` - Location string `xml:"location,attr"` - Type string `xml:"type,attr"` - // Exclusive attribute to match station - Station string `xml:"station,attr"` -} - -// Capability field model of INFO CAPABILITY command -type SeedLinkCapability struct { - XMLName xml.Name `xml:"capability"` - Name string `xml:"name,attr"` -} - -// Built-in commands of SeedLink -type SeedLinkCommand struct { - HasExtraArgs bool - SeedLinkCommandCallback -} - -// SeedLink client state -type SeedLinkClient struct { - Streaming bool - Sequence int64 - Network string - Station string - Location string - Channels []string - StartTime time.Time - EndTime time.Time -} - -// SeedLink data packet model -type SeedLinkPacket struct { - Count []int32 - Channel string - Timestamp int64 -} - -type SeedLinkStreamer func(conn net.Conn, client *SeedLinkClient, pub *publisher.Geophone) error - -// Interface for SeedLink command callback & fallback -type SeedLinkCommandCallback interface { - Callback(*SeedLinkGlobal, *SeedLinkClient, *feature.FeatureOptions, SeedLinkStreamer, net.Conn, ...string) error - Fallback(*SeedLinkGlobal, *SeedLinkClient, *feature.FeatureOptions, net.Conn, ...string) -} diff --git a/driver/serial/close.go b/driver/serial/close.go deleted file mode 100644 index 9f96ad715..000000000 --- a/driver/serial/close.go +++ /dev/null @@ -1,11 +0,0 @@ -package serial - -import "io" - -func Close(port io.ReadWriteCloser) error { - if port == nil { - return nil - } - - return port.Close() -} diff --git a/driver/serial/filter.go b/driver/serial/filter.go deleted file mode 100644 index 7dc473b78..000000000 --- a/driver/serial/filter.go +++ /dev/null @@ -1,28 +0,0 @@ -package serial - -import ( - "bytes" - "fmt" - "io" - "math" - "time" -) - -func Filter(port io.ReadWriteCloser, signature []byte) ([]byte, error) { - header := make([]byte, len(signature)) - - for i := 0; i < math.MaxUint8; i++ { - _, err := port.Read(header) - if err != nil { - return nil, err - } - - if 
bytes.Equal(header, signature) { - return header, nil - } else { - time.Sleep(time.Millisecond) - } - } - - return header, fmt.Errorf("failed to filter header") -} diff --git a/driver/serial/open.go b/driver/serial/open.go deleted file mode 100644 index 87319f25f..000000000 --- a/driver/serial/open.go +++ /dev/null @@ -1,27 +0,0 @@ -package serial - -import ( - "io" - - "github.com/bclswl0827/go-serial" -) - -func Open(device string, baud int) (io.ReadWriteCloser, error) { - port, err := serial.Open(device, - serial.WithHUPCL(true), - serial.WithDataBits(8), - serial.WithBaudrate(baud), - serial.WithReadTimeout(5), - serial.WithWriteTimeout(5), - serial.WithParity(serial.NoParity), - serial.WithStopBits(serial.OneStopBit), - ) - if err != nil { - return nil, err - } - - port.SetDTR(true) - port.SetRTS(true) - - return port, nil -} diff --git a/driver/serial/read.go b/driver/serial/read.go deleted file mode 100644 index 0efe7fd13..000000000 --- a/driver/serial/read.go +++ /dev/null @@ -1,37 +0,0 @@ -package serial - -import ( - "fmt" - "io" - "time" -) - -func Read(r io.Reader, buf []byte, timeout time.Duration) (n int, err error) { - min := len(buf) - - if len(buf) < min { - return 0, io.ErrShortBuffer - } - - start := time.Now() - for n < min { - if time.Since(start) > timeout { - return 0, fmt.Errorf("timeout due to no response") - } - - nn, err := r.Read(buf[n:]) - if err != nil { - return 0, err - } - - n += nn - } - - if n >= min { - err = nil - } else if n > 0 && err == io.EOF { - err = io.ErrUnexpectedEOF - } - - return n, err -} diff --git a/publisher/decode.go b/drivers/dao/array/decode.go similarity index 86% rename from publisher/decode.go rename to drivers/dao/array/decode.go index 593b6f083..6b60f91a3 100644 --- a/publisher/decode.go +++ b/drivers/dao/array/decode.go @@ -1,4 +1,4 @@ -package publisher +package array import ( "fmt" @@ -6,7 +6,7 @@ import ( "strings" ) -func DecodeInt32Array(val any) ([]int32, error) { +func (data Int32Array) Decode(val 
any) ([]int32, error) { var strArr []string switch v := val.(type) { case string: diff --git a/publisher/encode.go b/drivers/dao/array/encode.go similarity index 73% rename from publisher/encode.go rename to drivers/dao/array/encode.go index 4b4613612..45e78dfa7 100644 --- a/publisher/encode.go +++ b/drivers/dao/array/encode.go @@ -1,10 +1,10 @@ -package publisher +package array import ( "strconv" ) -func EncodeInt32Array(data []int32) string { +func (data Int32Array) Encode() string { var rawText string for ii, vv := range data { rawText += strconv.Itoa(int(vv)) diff --git a/publisher/array.go b/drivers/dao/array/types.go similarity index 56% rename from publisher/array.go rename to drivers/dao/array/types.go index 13c508f06..afb54d559 100644 --- a/publisher/array.go +++ b/drivers/dao/array/types.go @@ -1,11 +1,11 @@ -package publisher +package array -import ( - "database/sql/driver" -) +import "database/sql/driver" + +type Int32Array []int32 func (i *Int32Array) Scan(val any) error { - intArr, err := DecodeInt32Array(val) + intArr, err := i.Decode(val) if err != nil { return err } @@ -15,5 +15,5 @@ func (i *Int32Array) Scan(val any) error { } func (i Int32Array) Value() (driver.Value, error) { - return EncodeInt32Array(i), nil + return i.Encode(), nil } diff --git a/drivers/dao/close.go b/drivers/dao/close.go new file mode 100644 index 000000000..fcd9b1759 --- /dev/null +++ b/drivers/dao/close.go @@ -0,0 +1,16 @@ +package dao + +import "gorm.io/gorm" + +func Close(dbObj *gorm.DB) error { + if dbObj == nil { + return nil + } + + sqlDB, err := dbObj.DB() + if err != nil { + return err + } + + return sqlDB.Close() +} diff --git a/drivers/dao/mariadb.go b/drivers/dao/mariadb.go new file mode 100644 index 000000000..67b78a92d --- /dev/null +++ b/drivers/dao/mariadb.go @@ -0,0 +1,26 @@ +package dao + +import ( + "fmt" + "time" + + "gorm.io/driver/mysql" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +type MariaDB struct{} + +func (m *MariaDB) match(engine string) bool { 
+ return engine == "mysql" || engine == "mariadb" +} + +func (m *MariaDB) open(host string, port int, username, password, database string, timeout time.Duration) (*gorm.DB, error) { + dsn := fmt.Sprintf( + "%s:%s@tcp(%s:%d)/%s?charset=utf8mb4&parseTime=True&timeout=%ds&loc=UTC", + username, password, host, port, database, int(timeout.Seconds()), + ) + return gorm.Open(mysql.Open(dsn), &gorm.Config{ + Logger: logger.Default.LogMode(logger.Silent), + }) +} diff --git a/drivers/dao/migrate.go b/drivers/dao/migrate.go new file mode 100644 index 000000000..3b554a229 --- /dev/null +++ b/drivers/dao/migrate.go @@ -0,0 +1,10 @@ +package dao + +import ( + "gorm.io/gorm" +) + +func Migrate[T any](dbObj *gorm.DB, tableImpl ITable[T]) error { + tableRecord := tableImpl.GetModel() + return dbObj.Table(tableImpl.GetName()).AutoMigrate(&tableRecord) +} diff --git a/drivers/dao/open.go b/drivers/dao/open.go new file mode 100644 index 000000000..fe44356ed --- /dev/null +++ b/drivers/dao/open.go @@ -0,0 +1,24 @@ +package dao + +import ( + "fmt" + + "gorm.io/gorm" +) + +func Open(host string, port int, engineName, username, password, database string) (*gorm.DB, error) { + engines := []engine{ + &PostgreSQL{}, + &MariaDB{}, + &SQLServer{}, + &SQLite{}, + } + for _, e := range engines { + if e.match(engineName) { + return e.open(host, port, username, password, database, TIMEOUT_THRESHOLD) + } + } + + err := fmt.Errorf("database engine %s is unsupported", engineName) + return nil, err +} diff --git a/drivers/dao/postgresql.go b/drivers/dao/postgresql.go new file mode 100644 index 000000000..d4ce920bb --- /dev/null +++ b/drivers/dao/postgresql.go @@ -0,0 +1,35 @@ +package dao + +import ( + "fmt" + "time" + + "gorm.io/driver/postgres" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +type PostgreSQL struct{} + +func (p *PostgreSQL) match(engine string) bool { + return engine == "postgres" || engine == "postgresql" +} + +func (p *PostgreSQL) open(host string, port int, username, password, 
database string, timeout time.Duration) (*gorm.DB, error) { + dsn := fmt.Sprintf( + "host=%s port=%d user=%s password=%s dbname=%s sslmode=disable connect_timeout=%d TimeZone=Etc/GMT", + host, port, username, password, database, int(timeout.Seconds()), + ) + db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{ + Logger: logger.Default.LogMode(logger.Silent), + }) + if err != nil { + return nil, err + } + sqlDB, err := db.DB() + if err != nil { + return nil, err + } + sqlDB.SetConnMaxLifetime(timeout) + return db, nil +} diff --git a/drivers/dao/sqlite.go b/drivers/dao/sqlite.go new file mode 100644 index 000000000..31963c845 --- /dev/null +++ b/drivers/dao/sqlite.go @@ -0,0 +1,27 @@ +package dao + +import ( + "fmt" + "time" + + "github.com/bclswl0827/sqlite" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +type SQLite struct{} + +func (s *SQLite) match(engine string) bool { + return engine == "sqlite3" || engine == "sqlite" +} + +func (s *SQLite) open(host string, port int, username, password, database string, timeout time.Duration) (*gorm.DB, error) { + dsn := fmt.Sprintf("file://%s?cache=shared&mode=rwc&_pragma=busy_timeout(%d)", database, int(timeout.Seconds())) + db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{ + Logger: logger.Default.LogMode(logger.Silent), + }) + sqlDB, _ := db.DB() + sqlDB.SetMaxOpenConns(1) + + return db, err +} diff --git a/drivers/dao/sqlserver.go b/drivers/dao/sqlserver.go new file mode 100644 index 000000000..f30106b53 --- /dev/null +++ b/drivers/dao/sqlserver.go @@ -0,0 +1,26 @@ +package dao + +import ( + "fmt" + "time" + + "gorm.io/driver/sqlserver" + "gorm.io/gorm" + "gorm.io/gorm/logger" +) + +type SQLServer struct{} + +func (s *SQLServer) match(engine string) bool { + return engine == "sqlserver" || engine == "mssql" +} + +func (s *SQLServer) open(host string, port int, username, password, database string, timeout time.Duration) (*gorm.DB, error) { + dsn := fmt.Sprintf( + "sqlserver://%s:%s@%s:%d?database=%s", + username, 
password, host, port, database, + ) + return gorm.Open(sqlserver.Open(dsn), &gorm.Config{ + Logger: logger.Default.LogMode(logger.Silent), + }) +} diff --git a/drivers/dao/tables/adc_count.go b/drivers/dao/tables/adc_count.go new file mode 100644 index 000000000..df2c0b942 --- /dev/null +++ b/drivers/dao/tables/adc_count.go @@ -0,0 +1,23 @@ +package tables + +import ( + "github.com/anyshake/observer/drivers/dao" + "github.com/anyshake/observer/drivers/dao/array" +) + +type AdcCount struct { + dao.BaseModel + Timestamp int64 `gorm:"column:timestamp;not null;index"` + Z_Axis array.Int32Array `gorm:"column:z_axis;type:text"` + E_Axis array.Int32Array `gorm:"column:e_axis;type:text"` + N_Axis array.Int32Array `gorm:"column:n_axis;type:text"` + SampleRate int `gorm:"column:sample_rate;not null"` +} + +func (t AdcCount) GetModel() AdcCount { + return AdcCount{} +} + +func (t AdcCount) GetName() string { + return "adc_count" +} diff --git a/drivers/dao/types.go b/drivers/dao/types.go new file mode 100644 index 000000000..0b3279eca --- /dev/null +++ b/drivers/dao/types.go @@ -0,0 +1,25 @@ +package dao + +import ( + "time" + + "gorm.io/gorm" +) + +const TIMEOUT_THRESHOLD = 5 * time.Second + +type engine interface { + match(engine string) bool + open(host string, port int, username, password, database string, timeout time.Duration) (*gorm.DB, error) +} + +type BaseModel struct { + // This field is the primary key of the table + PrimaryKey uint64 `gorm:"primary_key"` + CreatedAt int64 `gorm:"column:created_at;autoUpdateTime:milli;<-:create"` +} + +type ITable[T any] interface { + GetModel() T + GetName() string +} diff --git a/drivers/explorer/impl.go b/drivers/explorer/impl.go new file mode 100644 index 000000000..023ea5e4a --- /dev/null +++ b/drivers/explorer/impl.go @@ -0,0 +1,426 @@ +package explorer + +import ( + "bytes" + "encoding/binary" + "errors" + "fmt" + "math" + "time" + "unsafe" + + "github.com/anyshake/observer/utils/fifo" + cmap 
"github.com/orcaman/concurrent-map/v2"
	messagebus "github.com/vardius/message-bus"
)

var (
	legacy_packet_frame_header   = []byte{0xFC, 0x1B}
	mainline_packet_frame_header = []byte{0xF1, 0xD9}
	mainline_packet_frame_tail   = []byte{0xD9, 0xF1}
)

// In legacy mode, each packet contains 3 channels, n samples per channel.
// The packet is sent at an interval of (1000 / sample rate) milliseconds.
// Set n = 5 (also in Explorer) fits the common sample rates (25, 50, 100, 125 Hz).
const legacy_packet_channel_size = 5

// Legacy packet structure, fixed size.
// Each channel has a checksum, which is the XOR of all bytes in the channel.
type legacyPacket struct {
	Z_Axis   [legacy_packet_channel_size]int32
	E_Axis   [legacy_packet_channel_size]int32
	N_Axis   [legacy_packet_channel_size]int32
	Checksum [3]uint8
}

// length returns the on-wire size of the packet body in bytes:
// 3 channels of int32 samples plus the 3 per-channel checksum bytes.
func (g *legacyPacket) length() int {
	return int(unsafe.Sizeof(g.Z_Axis) + unsafe.Sizeof(g.E_Axis) + unsafe.Sizeof(g.N_Axis) + unsafe.Sizeof(g.Checksum))
}

// decode parses a little-endian packet body into g and verifies the
// per-channel XOR checksums, returning an error on mismatch.
func (g *legacyPacket) decode(data []byte) error {
	err := binary.Read(bytes.NewReader(data), binary.LittleEndian, g)
	if err != nil {
		return err
	}

	// Using XOR algorithm, one checksum byte per channel
	calc_checksum := [3]uint8{0, 0, 0}
	z_axis_offset := 0
	e_axis_offset := int(unsafe.Sizeof(int32(0)) * legacy_packet_channel_size)
	n_axis_offset := int(unsafe.Sizeof(int32(0)) * legacy_packet_channel_size * 2)
	for i := z_axis_offset; i < e_axis_offset; i++ {
		calc_checksum[0] ^= data[i]
	}
	for i := e_axis_offset; i < n_axis_offset; i++ {
		calc_checksum[1] ^= data[i]
	}
	// Fix: the N-axis loop previously started at e_axis_offset, folding the
	// E-axis bytes into checksum[2] instead of covering only the N-axis.
	for i := n_axis_offset; i < len(data)-int(unsafe.Sizeof([3]uint8{})); i++ {
		calc_checksum[2] ^= data[i]
	}
	// Fix: the comparison was inverted — it reported a mismatch exactly when
	// the checksums were equal, rejecting every valid packet and accepting
	// corrupted ones.
	if !bytes.Equal(g.Checksum[:], calc_checksum[:]) {
		return fmt.Errorf("checksum mismatch, expected %v, got %v", g.Checksum, calc_checksum)
	}

	return nil
}

// Mainline packet header structure, fixed size.
// 34 bytes of header data without the frame header bytes.
type mainlinePacketHeader struct {
	sampleRate uint16
	timestamp  int64
	deviceId   uint32
	latitude   float32
	longitude  float32
	elevation  float32
	reserved   uint64
	checksum   uint8
}

// length returns the on-wire header size in bytes (35: 34 payload bytes
// plus one trailing XOR checksum byte).
func (g *mainlinePacketHeader) length() int {
	return int(unsafe.Sizeof(g.sampleRate) +
		unsafe.Sizeof(g.timestamp) +
		unsafe.Sizeof(g.deviceId) +
		unsafe.Sizeof(g.latitude) +
		unsafe.Sizeof(g.longitude) +
		unsafe.Sizeof(g.elevation) +
		unsafe.Sizeof(g.reserved) +
		unsafe.Sizeof(g.checksum))
}

// decode parses a little-endian header body into g after verifying the
// trailing XOR checksum byte. It returns an error if data is shorter than
// length() or if the checksum does not match.
func (g *mainlinePacketHeader) decode(data []byte) error {
	// Fix: guard against short reads — the fixed-offset indexing below
	// previously panicked on a truncated buffer.
	if len(data) < g.length() {
		return fmt.Errorf("header too short, expected %d bytes, got %d", g.length(), len(data))
	}
	// The checksum byte is the last byte of the header and is not part of
	// the XOR payload (derived from length() instead of magic constants).
	payloadLen := g.length() - 1
	g.checksum = data[payloadLen]

	// Using XOR algorithm over the payload bytes
	calc_checksum := uint8(0)
	for i := 0; i < payloadLen; i++ {
		calc_checksum ^= data[i]
	}
	if calc_checksum != g.checksum {
		return fmt.Errorf("checksum mismatch, expected %d, got %d", g.checksum, calc_checksum)
	}

	g.sampleRate = binary.LittleEndian.Uint16(data[:2])
	g.timestamp = int64(binary.LittleEndian.Uint64(data[2:10]))
	g.deviceId = binary.LittleEndian.Uint32(data[10:14])
	g.latitude = math.Float32frombits(binary.LittleEndian.Uint32(data[14:18]))
	g.longitude = math.Float32frombits(binary.LittleEndian.Uint32(data[18:22]))
	g.elevation = math.Float32frombits(binary.LittleEndian.Uint32(data[22:26]))
	g.reserved = binary.LittleEndian.Uint64(data[26:34])

	return nil
}

// Mainline packet channel structure, variable number of samples.
// Flexibly sized packet channel depending on the sample rate.
type mainlinePacketChannel struct {
	z_axis   []int32
	e_axis   []int32
	n_axis   []int32
	checksum uint32
}

// length returns the on-wire size in bytes of the channel section for the
// given sample rate: 3 channels of int32 samples plus a 4-byte CRC.
func (g *mainlinePacketChannel) length(sampleRate int) int {
	return 3*sampleRate*int(unsafe.Sizeof(int32(0))) + // Z, E, N axis data
		int(unsafe.Sizeof(uint32(0))) // Checksum of Z, E, N axis
}

// decode parses the channel section (little-endian samples followed by a
// little-endian CRC-32/MPEG-2 checksum) into g.
// NOTE: data is byte-swapped in place to big-endian for the CRC
// calculation, so the caller's buffer is mutated.
func (g *mainlinePacketChannel) decode(data []byte, sampleRate int) error {
	// Fix: guard against short buffers — the slicing below previously panicked.
	if len(data) < g.length(sampleRate) {
		return fmt.Errorf("channel data too short, expected %d bytes, got %d", g.length(sampleRate), len(data))
	}
	g.checksum = binary.LittleEndian.Uint32(data[len(data)-4:])

	// Convert little-endian to big-endian for checksum calculation
	for i := 0; i < len(data)-4; i += 4 {
		data[i], data[i+1], data[i+2], data[i+3] = data[i+3], data[i+2], data[i+1], data[i]
	}

	// Using CRC-32/MPEG-2 algorithm (init 0xFFFFFFFF, poly 0x04C11DB7,
	// no reflection, no final XOR) over the byte-swapped sample bytes
	calc_checksum := uint32(0xFFFFFFFF)
	for _, v := range data[:len(data)-4] {
		calc_checksum ^= uint32(v) << 24
		for i := 0; i < 8; i++ {
			if (calc_checksum & 0x80000000) != 0 {
				calc_checksum = (calc_checksum << 1) ^ 0x04C11DB7
			} else {
				calc_checksum <<= 1
			}
		}
	}
	if calc_checksum != g.checksum {
		return fmt.Errorf("checksum mismatch, expected %d, got %d", g.checksum, calc_checksum)
	}

	// Parse the (now big-endian) sample data.
	// Fix: binary.Read errors were previously silently discarded.
	sampleSize := int(unsafe.Sizeof(int32(0)))
	g.z_axis = make([]int32, sampleRate)
	if err := binary.Read(bytes.NewReader(data[:sampleRate*sampleSize]), binary.BigEndian, g.z_axis); err != nil {
		return err
	}
	g.e_axis = make([]int32, sampleRate)
	if err := binary.Read(bytes.NewReader(data[sampleRate*sampleSize:2*sampleRate*sampleSize]), binary.BigEndian, g.e_axis); err != nil {
		return err
	}
	g.n_axis = make([]int32, sampleRate)
	if err := binary.Read(bytes.NewReader(data[2*sampleRate*sampleSize:3*sampleRate*sampleSize]), binary.BigEndian, g.n_axis); err != nil {
		return err
	}

	return nil
}

// Mainline packet tail structure, fixed size
// 9 bytes of tail data without the frame tail bytes
type mainlinePacketTail struct {
	reserved uint64
	checksum uint8
}

// length returns the on-wire tail size in bytes (8 reserved + 1 checksum).
func (g *mainlinePacketTail) length() int {
	return int(unsafe.Sizeof(g.reserved) + unsafe.Sizeof(g.checksum))
}
+ +func (g *mainlinePacketTail) decode(data []byte) error { + g.checksum = data[8] + + // Using XOR algorithm + calc_checksum := uint8(0) + for i := 0; i < len(data); i++ { + calc_checksum ^= data[i] + } + if calc_checksum != g.checksum { + return fmt.Errorf("checksum mismatch, expected %d, got %d", g.checksum, calc_checksum) + } + + g.reserved = binary.LittleEndian.Uint64(data[:8]) + return nil +} + +type ExplorerDriverImpl struct { + // Dependencies for legacy mode + legacyPacket legacyPacket + // Dependencies for mainline mode + mainlinePacketHeader mainlinePacketHeader + mainlinePacketChannel mainlinePacketChannel + mainlinePacketTail mainlinePacketTail +} + +func (e *ExplorerDriverImpl) handleReadLegacyPacket(deps *ExplorerDependency) { + fifoBuffer := fifo.New(16384) + + // Read data from the transport continuously + go func() { + buf := make([]byte, 1024) + for { + select { + case <-deps.CancelToken.Done(): + return + default: + n, err := deps.Transport.Read(buf, 10*time.Millisecond, false) + if err != nil { + return + } + + fifoBuffer.Write(buf[:n]) + } + } + }() + + // Read data from the FIFO buffer continuously + var ( + dataBuffer = []legacyPacket{} + ticker = time.NewTicker(1 * time.Second) + ) + for { + select { + case <-deps.CancelToken.Done(): + return + case <-ticker.C: + if len(dataBuffer) > 0 { + deps.Health.Received++ + deps.Health.UpdatedAt = time.Now() + t, _ := deps.FallbackTime.GetTime() + var ( + z_axis_count []int32 + e_axis_count []int32 + n_axis_count []int32 + ) + for _, packet := range dataBuffer { + z_axis_count = append(z_axis_count, packet.Z_Axis[:]...) + e_axis_count = append(e_axis_count, packet.E_Axis[:]...) + n_axis_count = append(n_axis_count, packet.N_Axis[:]...) 
}

				sampleRate := len(dataBuffer) * legacy_packet_channel_size
				deps.Health.SampleRate = sampleRate
				finalPacket := ExplorerData{
					SampleRate: sampleRate,
					Z_Axis:     z_axis_count,
					E_Axis:     e_axis_count,
					N_Axis:     n_axis_count,
					Timestamp:  t.UTC().UnixMilli(),
				}
				deps.messageBus.Publish("explorer", &finalPacket)

				dataBuffer = []legacyPacket{}
			}
		default:
			for {
				dat, err := fifoBuffer.Read(legacy_packet_frame_header, len(legacy_packet_frame_header)+e.legacyPacket.length())
				if err != nil {
					// No complete frame available in the FIFO yet
					break
				}

				// Read the packet data
				err = e.legacyPacket.decode(dat[len(legacy_packet_frame_header):])
				if err != nil {
					deps.Health.Errors++
				} else {
					dataBuffer = append(dataBuffer, e.legacyPacket)
				}
			}
		}
	}
}

// handleReadMainlinePacket reads frames in mainline-protocol mode:
// sync on the frame header bytes, then read and verify the header
// (XOR checksum), channel data (CRC-32), and tail sections in sequence,
// publishing each decoded packet on the "explorer" topic.
func (e *ExplorerDriverImpl) handleReadMainlinePacket(deps *ExplorerDependency) {
	for {
		select {
		case <-deps.CancelToken.Done():
			return
		default:
			// Find the header sync bytes
			ok, _ := deps.Transport.Filter(mainline_packet_frame_header, 2*time.Second)
			if !ok {
				continue
			}

			// Read header section and update dependency data
			headerBuf := make([]byte, e.mainlinePacketHeader.length())
			_, err := deps.Transport.Read(headerBuf, time.Second, false)
			if err != nil {
				continue
			}
			err = e.mainlinePacketHeader.decode(headerBuf)
			if err != nil {
				deps.Health.Errors++
				continue
			}
			// NOTE(review): this overwrites a user-configured location with the
			// device-reported one only when all three values are already
			// non-zero, and never fills them in when they are unset. If the
			// intent was "fall back to device GPS when unconfigured", the
			// condition should compare == 0 — confirm against the config docs.
			if deps.Config.Latitude != 0 && deps.Config.Longitude != 0 && deps.Config.Elevation != 0 {
				deps.Config.Latitude = float64(e.mainlinePacketHeader.latitude)
				deps.Config.Longitude = float64(e.mainlinePacketHeader.longitude)
				deps.Config.Elevation = float64(e.mainlinePacketHeader.elevation)
			}

			// Get data section packet size and read the channel data
			sampleRate := int(e.mainlinePacketHeader.sampleRate)
			dataBuf := make([]byte, e.mainlinePacketChannel.length(sampleRate))
			_, err = deps.Transport.Read(dataBuf, time.Second, false)
			if err != nil {
				continue
			}
			err = e.mainlinePacketChannel.decode(dataBuf, sampleRate)
			if
err != nil { + deps.Health.Errors++ + continue + } + + // Get tail section data, check tail bytes of the packet + tailBuf := make([]byte, e.mainlinePacketTail.length()+len(mainline_packet_frame_tail)) + _, err = deps.Transport.Read(tailBuf, time.Second, false) + if err != nil { + continue + } + frameTailSliceIndex := len(tailBuf) - len(mainline_packet_frame_tail) + if !bytes.Equal(tailBuf[frameTailSliceIndex:], mainline_packet_frame_tail) { + deps.Health.Errors++ + continue + } + err = e.mainlinePacketTail.decode(tailBuf[:frameTailSliceIndex]) + if err != nil { + deps.Health.Errors++ + continue + } + + // Publish the data to the message bus + deps.Health.SampleRate = sampleRate + finalPacket := ExplorerData{ + SampleRate: sampleRate, + Z_Axis: e.mainlinePacketChannel.z_axis, + E_Axis: e.mainlinePacketChannel.e_axis, + N_Axis: e.mainlinePacketChannel.n_axis, + } + if e.mainlinePacketHeader.timestamp != 0 { + finalPacket.Timestamp = e.mainlinePacketHeader.timestamp + } else { + t, _ := deps.FallbackTime.GetTime() + finalPacket.Timestamp = t.UTC().UnixMilli() + } + deps.messageBus.Publish("explorer", &finalPacket) + deps.Health.UpdatedAt = time.Now() + deps.Health.Received++ + } + } +} + +func (e *ExplorerDriverImpl) readerDaemon(deps *ExplorerDependency) { + if deps.Config.LegacyMode { + e.handleReadLegacyPacket(deps) + } else { + e.handleReadMainlinePacket(deps) + } +} + +func (e *ExplorerDriverImpl) IsAvailable(deps *ExplorerDependency) bool { + buf := make([]byte, 128) + _, err := deps.Transport.Read(buf, 2*time.Second, true) + return err == nil +} + +func (e *ExplorerDriverImpl) Init(deps *ExplorerDependency) error { + deps.Health.StartTime, _ = deps.FallbackTime.GetTime() + + deps.subscribers = cmap.New[ExplorerEventHandler]() + deps.messageBus = messagebus.New(1024) + deps.Config.DeviceId = math.MaxUint32 + + // Get device ID in EEPROM + if !deps.Config.LegacyMode { + readTimeout := 5 * time.Second + startTime := time.Now() + for time.Since(startTime) < 
readTimeout { + ok, _ := deps.Transport.Filter(mainline_packet_frame_header, 2*time.Second) + if !ok { + continue + } + headerBuf := make([]byte, e.mainlinePacketHeader.length()) + _, err := deps.Transport.Read(headerBuf, time.Second, false) + if err != nil { + continue + } + err = e.mainlinePacketHeader.decode(headerBuf) + if err != nil { + continue + } + deps.Config.DeviceId = e.mainlinePacketHeader.deviceId + break + } + if time.Since(startTime) >= readTimeout { + return errors.New("failed to get device ID, please check the device") + } + } + + go e.readerDaemon(deps) + return nil +} + +func (e *ExplorerDriverImpl) Subscribe(deps *ExplorerDependency, clientId string, handler ExplorerEventHandler) error { + if _, ok := deps.subscribers.Get(clientId); ok { + return errors.New("this client has already subscribed") + } + deps.subscribers.Set(clientId, handler) + deps.messageBus.Subscribe("explorer", handler) + return nil +} + +func (e *ExplorerDriverImpl) Unsubscribe(deps *ExplorerDependency, clientId string) error { + fn, ok := deps.subscribers.Get(clientId) + if !ok { + return errors.New("this client has not subscribed") + } + deps.messageBus.Unsubscribe("explorer", fn) + deps.subscribers.Remove(clientId) + return nil +} diff --git a/drivers/explorer/types.go b/drivers/explorer/types.go new file mode 100644 index 000000000..ae6e8dc39 --- /dev/null +++ b/drivers/explorer/types.go @@ -0,0 +1,62 @@ +package explorer + +import ( + "context" + "time" + + "github.com/anyshake/observer/drivers/transport" + "github.com/anyshake/observer/utils/timesource" + cmap "github.com/orcaman/concurrent-map/v2" + messagebus "github.com/vardius/message-bus" +) + +const ( + EXPLORER_CHANNEL_CODE_Z = "Z" + EXPLORER_CHANNEL_CODE_E = "E" + EXPLORER_CHANNEL_CODE_N = "N" +) + +type ExplorerHealth struct { + SampleRate int + Errors int64 + Received int64 + StartTime time.Time + UpdatedAt time.Time // Last local system time the health information was updated +} + +type ExplorerConfig struct { 
+ NoGeophone bool + LegacyMode bool + DeviceId uint32 + Latitude float64 + Longitude float64 + Elevation float64 +} + +type ExplorerDependency struct { + Health ExplorerHealth + Config ExplorerConfig + FallbackTime timesource.Source + CancelToken context.Context + Transport transport.TransportDriver + messageBus messagebus.MessageBus + subscribers cmap.ConcurrentMap[string, ExplorerEventHandler] +} + +type ExplorerData struct { + SampleRate int `json:"sample_rate"` + Timestamp int64 `json:"timestamp"` + Z_Axis []int32 `json:"z_axis"` + E_Axis []int32 `json:"e_axis"` + N_Axis []int32 `json:"n_axis"` +} + +type ExplorerEventHandler = func(data *ExplorerData) + +type ExplorerDriver interface { + readerDaemon(deps *ExplorerDependency) + IsAvailable(deps *ExplorerDependency) bool + Init(deps *ExplorerDependency) error + Subscribe(deps *ExplorerDependency, clientId string, handler ExplorerEventHandler) error + Unsubscribe(deps *ExplorerDependency, clientId string) error +} diff --git a/drivers/transport/new.go b/drivers/transport/new.go new file mode 100644 index 000000000..2cbd1951e --- /dev/null +++ b/drivers/transport/new.go @@ -0,0 +1,16 @@ +package transport + +import "fmt" + +func New(dsn *TransportDependency) (TransportDriver, error) { + engines := map[string]TransportDriver{ + "serial": &TransportDriverSerialImpl{}, + "tcp": &TransportDriverTcpImpl{}, + } + engine, ok := engines[dsn.Engine] + if !ok { + return nil, fmt.Errorf("engine %s is not supported", dsn.Engine) + } + + return engine, nil +} diff --git a/drivers/transport/serial_impl.go b/drivers/transport/serial_impl.go new file mode 100644 index 000000000..9d8dff2c2 --- /dev/null +++ b/drivers/transport/serial_impl.go @@ -0,0 +1,120 @@ +package transport + +import ( + "bytes" + "errors" + "fmt" + "net/url" + "strconv" + "sync" + "time" + + "github.com/bclswl0827/go-serial" +) + +type TransportDriverSerialImpl struct { + conn *serial.Port + mutex sync.Mutex +} + +func (t *TransportDriverSerialImpl) 
Open(deps *TransportDependency) error { + u, err := url.Parse(deps.DSN) + if err != nil { + return err + } + + deviceName := u.Host + if len(deviceName) == 0 { + deviceName = u.Path + } + + baudrate, err := strconv.Atoi(u.Query().Get("baudrate")) + if err != nil { + return err + } + + conn, err := serial.Open( + deviceName, + serial.WithHUPCL(true), + serial.WithDataBits(8), + serial.WithWriteTimeout(10), + serial.WithBaudrate(baudrate), + serial.WithParity(serial.NoParity), + serial.WithStopBits(serial.OneStopBit), + ) + if err != nil { + return fmt.Errorf("%v %s", err, deviceName) + } + + conn.SetDTR(false) + conn.SetRTS(false) + + t.conn = conn + return nil +} + +func (t *TransportDriverSerialImpl) Close() error { + if t.conn == nil { + return errors.New("connection is not opened") + } + + t.mutex.Lock() + defer t.mutex.Unlock() + + return t.conn.Close() +} + +func (t *TransportDriverSerialImpl) Read(buf []byte, timeout time.Duration, flush bool) (int, error) { + if t.conn == nil { + return 0, errors.New("connection is not opened") + } + + t.mutex.Lock() + defer t.mutex.Unlock() + + if flush { + t.conn.ResetInputBuffer() + } + t.conn.SetReadTimeout(int(timeout.Milliseconds())) + return t.conn.Read(buf) +} + +func (t *TransportDriverSerialImpl) Write(buf []byte, flush bool) (int, error) { + if t.conn == nil { + return 0, errors.New("connection is not opened") + } + + t.mutex.Lock() + defer t.mutex.Unlock() + + if flush { + t.conn.ResetOutputBuffer() + } + return t.conn.Write(buf) +} + +func (t *TransportDriverSerialImpl) Filter(signature []byte, timeout time.Duration) (bool, error) { + if t.conn == nil { + return false, errors.New("connection is not opened") + } + + t.mutex.Lock() + defer t.mutex.Unlock() + + t.conn.SetReadTimeout(int(timeout.Milliseconds())) + + t.conn.ResetInputBuffer() + t.conn.ResetOutputBuffer() + + header := make([]byte, len(signature)) + _, err := t.conn.Read(header) + if err != nil { + return false, err + } + + if bytes.Equal(header, 
signature) { + return true, nil + } + + return false, nil +} diff --git a/drivers/transport/tcp_impl.go b/drivers/transport/tcp_impl.go new file mode 100644 index 000000000..1f3fc377f --- /dev/null +++ b/drivers/transport/tcp_impl.go @@ -0,0 +1,69 @@ +package transport + +import ( + "bytes" + "net" + "net/url" + "sync" + "time" +) + +type TransportDriverTcpImpl struct { + conn net.Conn + mutex sync.Mutex +} + +func (t *TransportDriverTcpImpl) Open(deps *TransportDependency) error { + u, err := url.Parse(deps.DSN) + if err != nil { + return err + } + + conn, err := net.Dial("tcp", u.Host) + if err != nil { + return err + } + + t.conn = conn + return nil +} + +func (t *TransportDriverTcpImpl) Close() error { + t.mutex.Lock() + defer t.mutex.Unlock() + + return t.conn.Close() +} + +func (t *TransportDriverTcpImpl) Read(buf []byte, timeout time.Duration, flush bool) (int, error) { + t.mutex.Lock() + defer t.mutex.Unlock() + + t.conn.SetReadDeadline(time.Now().Add(timeout)) + return t.conn.Read(buf) +} + +func (t *TransportDriverTcpImpl) Write(buf []byte, flush bool) (int, error) { + t.mutex.Lock() + defer t.mutex.Unlock() + + t.conn.SetWriteDeadline(time.Now().Add(time.Second)) + return t.conn.Write(buf) +} + +func (t *TransportDriverTcpImpl) Filter(signature []byte, timeout time.Duration) (bool, error) { + t.mutex.Lock() + defer t.mutex.Unlock() + + header := make([]byte, len(signature)) + _, err := t.conn.Read(header) + if err != nil { + return false, err + } + + if bytes.Equal(header, signature) { + return true, nil + } + + return false, nil +} diff --git a/drivers/transport/types.go b/drivers/transport/types.go new file mode 100644 index 000000000..1ad04d4b7 --- /dev/null +++ b/drivers/transport/types.go @@ -0,0 +1,16 @@ +package transport + +import "time" + +type TransportDependency struct { + DSN string + Engine string +} + +type TransportDriver interface { + Open(deps *TransportDependency) error + Close() error + Read(buf []byte, timeout time.Duration, flush 
bool) (int, error) + Write(buf []byte, flush bool) (int, error) + Filter(signature []byte, timeout time.Duration) (bool, error) +} diff --git a/feature/archiver/callbacks.go b/feature/archiver/callbacks.go deleted file mode 100644 index ac44887bd..000000000 --- a/feature/archiver/callbacks.go +++ /dev/null @@ -1,26 +0,0 @@ -package archiver - -import ( - "github.com/anyshake/observer/driver/dao" - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/logger" - "github.com/anyshake/observer/utils/text" - "github.com/fatih/color" -) - -func (a *Archiver) OnStart(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgMagenta, false) -} - -func (a *Archiver) OnStop(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgBlue, false) -} - -func (a *Archiver) OnReady(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, "1 message has been archived", color.FgGreen, false) -} - -func (a *Archiver) OnError(options *feature.FeatureOptions, err error) { - dao.Close(options.Database) - logger.Print(MODULE, err, color.FgRed, false) -} diff --git a/feature/archiver/cleanup.go b/feature/archiver/cleanup.go deleted file mode 100644 index 14d718569..000000000 --- a/feature/archiver/cleanup.go +++ /dev/null @@ -1,32 +0,0 @@ -package archiver - -import ( - "time" - - "github.com/anyshake/observer/driver/dao" - "github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/utils/duration" - "gorm.io/gorm" -) - -func (a *Archiver) handleCleanup(status *publisher.Status, db *gorm.DB, lifeCycle int) { - for { - // Wait until system is ready - if status.ReadyTime.IsZero() { - time.Sleep(time.Second) - continue - } - - // Get start and end time - currentTime, _ := duration.Timestamp(status.System.Offset) - endTime := currentTime.Add(-time.Duration(lifeCycle) * time.Hour * 24) - - // Remove expired records - err := dao.Delete(db, 0, endTime.UnixMilli()) - if err != 
nil { - a.OnError(nil, err) - } - - time.Sleep(time.Hour) - } -} diff --git a/feature/archiver/daemon.go b/feature/archiver/daemon.go deleted file mode 100644 index a47ba58ec..000000000 --- a/feature/archiver/daemon.go +++ /dev/null @@ -1,79 +0,0 @@ -package archiver - -import ( - "fmt" - "os" - "os/signal" - "sync" - "syscall" - - "github.com/anyshake/observer/driver/dao" - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/utils/logger" - "github.com/fatih/color" -) - -func (a *Archiver) Run(options *feature.FeatureOptions, waitGroup *sync.WaitGroup) { - if !options.Config.Archiver.Enable { - a.OnStop(options, "service is disabled") - return - } else { - waitGroup.Add(1) - defer waitGroup.Done() - } - - // Connect to database - a.OnStart(options, "service has started") - pdb, err := dao.Open( - options.Config.Archiver.Host, - options.Config.Archiver.Port, - options.Config.Archiver.Engine, - options.Config.Archiver.Username, - options.Config.Archiver.Password, - options.Config.Archiver.Database, - ) - if err != nil { - a.OnError(options, err) - os.Exit(1) - } - - // Migrate database - err = dao.Migrate(pdb) - if err != nil { - a.OnError(options, err) - os.Exit(1) - } - options.Database = pdb - defer dao.Close(pdb) - - // Start cleanup routine if life cycle bigger than 0 - lifeCycle := options.Config.MiniSEED.LifeCycle - if lifeCycle > 0 { - go a.handleCleanup(options.Status, pdb, lifeCycle) - } - - // Archive when new message arrived - expressionForSubscribe := true - go func() { - publisher.Subscribe( - &options.Status.Geophone, - &expressionForSubscribe, - func(gp *publisher.Geophone) error { - return a.handleMessage(gp, options, pdb) - }, - ) - - err = fmt.Errorf("service exited with an error") - a.OnError(options, err) - os.Exit(1) - }() - - // Receive interrupt signals - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM) - - // Wait for interrupt signals - 
<-sigCh - logger.Print(MODULE, "closing database connection", color.FgBlue, true) -} diff --git a/feature/archiver/message.go b/feature/archiver/message.go deleted file mode 100644 index 5f8afbf6b..000000000 --- a/feature/archiver/message.go +++ /dev/null @@ -1,20 +0,0 @@ -package archiver - -import ( - "github.com/anyshake/observer/driver/dao" - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/publisher" - "gorm.io/gorm" -) - -func (a *Archiver) handleMessage(gp *publisher.Geophone, options *feature.FeatureOptions, pdb *gorm.DB) error { - err := dao.Insert(pdb, gp) - if err != nil { - a.OnError(options, err) - dao.Close(pdb) - return err - } - - a.OnReady(options) - return nil -} diff --git a/feature/archiver/types.go b/feature/archiver/types.go deleted file mode 100644 index 27bc3c3e8..000000000 --- a/feature/archiver/types.go +++ /dev/null @@ -1,5 +0,0 @@ -package archiver - -const MODULE string = "archiver" - -type Archiver struct{} diff --git a/feature/geophone/callbacks.go b/feature/geophone/callbacks.go deleted file mode 100644 index 0d564e16b..000000000 --- a/feature/geophone/callbacks.go +++ /dev/null @@ -1,39 +0,0 @@ -package geophone - -import ( - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/logger" - "github.com/anyshake/observer/utils/text" - "github.com/fatih/color" -) - -func (g *Geophone) OnStart(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgMagenta, false) -} - -func (g *Geophone) OnStop(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgBlue, true) -} - -func (g *Geophone) OnReady(options *feature.FeatureOptions, v ...any) { - if len(v) == 0 { - logger.Print(MODULE, "1 full packet received", color.FgGreen, false) - return - } - if !options.Status.ReadyTime.IsZero() { - // Appending packet data to buffer - packet := v[0].(Packet) - for i := 0; i < options.Config.Serial.Packet; i++ { - 
options.Status.Buffer.EHZ = append(options.Status.Buffer.EHZ, packet.EHZ[i]) - options.Status.Buffer.EHE = append(options.Status.Buffer.EHE, packet.EHE[i]) - options.Status.Buffer.EHN = append(options.Status.Buffer.EHN, packet.EHN[i]) - } - } else { - logger.Print(MODULE, "waiting for time alignment", color.FgYellow, false) - } -} - -func (g *Geophone) OnError(options *feature.FeatureOptions, err error) { - options.Status.System.Errors++ - logger.Print(MODULE, err, color.FgRed, false) -} diff --git a/feature/geophone/checksum.go b/feature/geophone/checksum.go deleted file mode 100644 index 0d6f5c7be..000000000 --- a/feature/geophone/checksum.go +++ /dev/null @@ -1,38 +0,0 @@ -package geophone - -import ( - "fmt" - "unsafe" -) - -func (g *Geophone) getChecksum(data []int32) byte { - checksum := uint8(0) - - for i := 0; i < len(data); i++ { - bytes := (*[4]byte)(unsafe.Pointer(&data[i]))[:] - - for j := 0; j < int(unsafe.Sizeof(int32(0))); j++ { - checksum ^= bytes[j] - } - } - - return checksum -} - -func (g *Geophone) isChecksumCorrect(packet *Packet) error { - var ( - EHZ = g.getChecksum(packet.EHZ[:]) - EHE = g.getChecksum(packet.EHE[:]) - EHN = g.getChecksum(packet.EHN[:]) - ) - if EHZ != packet.Checksum[0] || - EHE != packet.Checksum[1] || - EHN != packet.Checksum[2] { - return fmt.Errorf( - "expected checksum %v, got %v", - packet.Checksum, [3]uint8{EHZ, EHE, EHN}, - ) - } - - return nil -} diff --git a/feature/geophone/daemon.go b/feature/geophone/daemon.go deleted file mode 100644 index 0bfee72ef..000000000 --- a/feature/geophone/daemon.go +++ /dev/null @@ -1,112 +0,0 @@ -package geophone - -import ( - "fmt" - "os" - "os/signal" - "sync" - "syscall" - "time" - - "github.com/anyshake/observer/driver/serial" - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/duration" -) - -func (g *Geophone) Run(options *feature.FeatureOptions, waitGroup *sync.WaitGroup) { - var ( - device = options.Config.Serial.Device - baud = 
options.Config.Serial.Baud - packetLen = options.Config.Serial.Packet - ) - - // Increase wait group counter - waitGroup.Add(1) - defer waitGroup.Done() - - // Open serial port - port, err := serial.Open(device, baud) - if err != nil { - g.OnError(options, err) - os.Exit(1) - } - defer serial.Close(port) - - g.Ticker = time.NewTicker(READY_THRESHOLD) - defer g.Ticker.Stop() - - go func() { - // Initialize geophone packet - var packet Packet - g.OnStart(options, "service has started") - - lastRead := time.Now().UTC() - for { - // Read from serial port by channel packet length - err := g.Read(port, options.Config, &packet, packetLen) - if err != nil { - serial.Close(port) - g.OnError(options, err) - time.Sleep(time.Millisecond * 100) - - // Reopen serial port - port, err = serial.Open(device, baud) - if err != nil { - g.OnError(options, err) - os.Exit(1) - } - - // Reset device after reopen - err = g.Reset(port) - if err != nil { - g.OnError(options, err) - } - - lastRead = time.Now().UTC() - continue - } else { - g.OnReady(options, packet) - } - - // Reset device if reached TIMEOUT_THRESHOLD - if duration.Difference(time.Now().UTC(), lastRead) >= TIMEOUT_THRESHOLD { - err := fmt.Errorf("reset due to unusual gap") - g.OnError(options, err) - - err = g.Reset(port) - if err != nil { - g.OnError(options, err) - } - } - - lastRead = time.Now().UTC() - } - }() - - go func() { - for { - <-g.Ticker.C - currentTime, _ := duration.Timestamp(options.Status.System.Offset) - timeDiff := duration.Difference(currentTime, options.Status.LastRecvTime) - // Set packet timestamp, note that the timestamp in buffer is the start of the packet - options.Status.Buffer.TS = currentTime.UnixMilli() - timeDiff.Milliseconds() - // Set last received time is the current timestamp - options.Status.LastRecvTime = currentTime - options.Status.System.Messages++ - // Copy and reset buffer - options.Status.Geophone = *options.Status.Buffer - options.Status.Buffer.EHZ = []int32{} - 
options.Status.Buffer.EHE = []int32{} - options.Status.Buffer.EHN = []int32{} - g.OnReady(options) - } - }() - - // Receive interrupt signals - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM) - - // Wait for interrupt signals - <-sigCh - g.OnStop(options, "closing serial connection") -} diff --git a/feature/geophone/read.go b/feature/geophone/read.go deleted file mode 100644 index 3f74181f0..000000000 --- a/feature/geophone/read.go +++ /dev/null @@ -1,68 +0,0 @@ -package geophone - -import ( - "bytes" - "encoding/binary" - "io" - - "github.com/anyshake/observer/config" - "github.com/anyshake/observer/driver/serial" -) - -func (g *Geophone) Read(port io.ReadWriteCloser, conf *config.Conf, packet *Packet, packetLen int) error { - // Filter frame header - _, err := serial.Filter(port, SYNC_WORD[:]) - if err != nil { - return err - } - - // Read data frame - checksumLen := len(packet.Checksum) - buf := make([]byte, g.getPacketSize(packetLen, checksumLen)) - n, err := serial.Read(port, buf, TIMEOUT_THRESHOLD) - if err != nil { - return err - } - - // Allocate memory for data frame - packet.EHZ = make([]int32, packetLen) - packet.EHE = make([]int32, packetLen) - packet.EHN = make([]int32, packetLen) - - // Create reader for data frame - reader := bytes.NewReader(buf[:n]) - - // Parse EHZ channel - err = binary.Read(reader, binary.LittleEndian, packet.EHZ) - if err != nil { - return err - } - - // Parse EHE channel - err = binary.Read(reader, binary.LittleEndian, packet.EHE) - if err != nil { - return err - } - - // Parse EHN channel - err = binary.Read(reader, binary.LittleEndian, packet.EHN) - if err != nil { - return err - } - - // Parse checksum - for i := 0; i < checksumLen; i++ { - err = binary.Read(reader, binary.LittleEndian, &packet.Checksum[i]) - if err != nil { - return err - } - } - - // Compare checksum - err = g.isChecksumCorrect(packet) - if err != nil { - return err - } - - return nil -} diff --git 
a/feature/geophone/reset.go b/feature/geophone/reset.go deleted file mode 100644 index d4fcd1eac..000000000 --- a/feature/geophone/reset.go +++ /dev/null @@ -1,17 +0,0 @@ -package geophone - -import ( - "io" - - "github.com/anyshake/observer/driver/serial" -) - -func (g *Geophone) Reset(port io.ReadWriteCloser) error { - _, err := port.Write(RESET_WORD[:]) - if err != nil { - return err - } - - serial.Filter(port, ACK_WORD[:]) - return nil -} diff --git a/feature/geophone/size.go b/feature/geophone/size.go deleted file mode 100644 index dc4e857b6..000000000 --- a/feature/geophone/size.go +++ /dev/null @@ -1,6 +0,0 @@ -package geophone - -func (g *Geophone) getPacketSize(packetLen, checksumLen int) int { - // channelLen*packetLen*int32 + checksumLen + 1 - return checksumLen*packetLen*4 + checksumLen + 1 -} diff --git a/feature/geophone/types.go b/feature/geophone/types.go deleted file mode 100644 index 60aa253a5..000000000 --- a/feature/geophone/types.go +++ /dev/null @@ -1,32 +0,0 @@ -package geophone - -import "time" - -const MODULE string = "geophone" - -const ( - // READY_THRESHOLD should be strictly 1 second - READY_THRESHOLD time.Duration = 1 * time.Second - // TIMEOUT_THRESHOLD should be greater than READY_THRESHOLD - TIMEOUT_THRESHOLD time.Duration = 3 * time.Second -) - -var ( - // RESET_WORD resets geophone ADC module - RESET_WORD = [...]byte{0x61} - // SYNC_WORD indicates a data packet is following - SYNC_WORD = [...]byte{0xFC, 0x1B} - // ACK_WORD indicates a valid command is received - ACK_WORD = [...]byte{0xFC, 0x2B} -) - -type Geophone struct { - Ticker *time.Ticker -} - -type Packet struct { - EHZ []int32 // Vertical - EHE []int32 // East-West - EHN []int32 // North-South - Checksum [3]uint8 -} diff --git a/feature/miniseed/callbacks.go b/feature/miniseed/callbacks.go deleted file mode 100644 index bc18b95ff..000000000 --- a/feature/miniseed/callbacks.go +++ /dev/null @@ -1,29 +0,0 @@ -package miniseed - -import ( - 
"github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/logger" - "github.com/anyshake/observer/utils/text" - "github.com/fatih/color" -) - -func (m *MiniSEED) OnStart(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgMagenta, false) -} - -func (m *MiniSEED) OnStop(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgBlue, false) -} - -func (m *MiniSEED) OnReady(options *feature.FeatureOptions, v ...any) { - switch v[0].(string) { - case "append": - logger.Print(MODULE, "1 record has been append", color.FgGreen, false) - case "write": - logger.Print(MODULE, "1 record has been written", color.FgGreen, false) - } -} - -func (m *MiniSEED) OnError(options *feature.FeatureOptions, err error) { - logger.Print(MODULE, err, color.FgRed, false) -} diff --git a/feature/miniseed/cleanup.go b/feature/miniseed/cleanup.go deleted file mode 100644 index 3eff77df7..000000000 --- a/feature/miniseed/cleanup.go +++ /dev/null @@ -1,45 +0,0 @@ -package miniseed - -import ( - "os" - "path/filepath" - "strings" - "time" -) - -func (m *MiniSEED) handleCleanup(basePath, station, network string, lifeCycle int) { - for { - expiredFiles := []string{} - walkFn := func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - if !info.IsDir() { - modTime := info.ModTime() - duration := time.Duration(lifeCycle) * time.Hour * 24 - if time.Now().After(modTime.Add(duration)) && - strings.HasSuffix(path, ".mseed") && - strings.ContainsAny(path, station) && - strings.ContainsAny(path, network) { - expiredFiles = append(expiredFiles, path) - } - } - - return nil - } - - err := filepath.Walk(basePath, walkFn) - if err != nil { - m.OnError(nil, err) - } - - for _, file := range expiredFiles { - err := os.Remove(file) - if err != nil { - m.OnError(nil, err) - } - } - - time.Sleep(time.Minute) - } -} diff --git a/feature/miniseed/daemon.go b/feature/miniseed/daemon.go 
deleted file mode 100644 index f3ae249e5..000000000 --- a/feature/miniseed/daemon.go +++ /dev/null @@ -1,102 +0,0 @@ -package miniseed - -import ( - "fmt" - "os" - "strconv" - "sync" - "time" - - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/utils/duration" - "github.com/anyshake/observer/utils/logger" - "github.com/anyshake/observer/utils/text" - "github.com/bclswl0827/mseedio" - "github.com/fatih/color" -) - -func (m *MiniSEED) Run(options *feature.FeatureOptions, waitGroup *sync.WaitGroup) { - if !options.Config.MiniSEED.Enable { - m.OnStop(options, "service is disabled") - return - } - - // Get MiniSEED info & options - var ( - basePath = options.Config.MiniSEED.Path - lifeCycle = options.Config.MiniSEED.LifeCycle - station = text.TruncateString(options.Config.Station.Station, 5) - network = text.TruncateString(options.Config.Station.Network, 2) - location = text.TruncateString(options.Config.Station.Location, 2) - ) - - // Start cleanup routine if life cycle bigger than 0 - if lifeCycle > 0 { - go m.handleCleanup(basePath, station, network, lifeCycle) - } - - // Wait for time syncing - for options.Status.ReadyTime.IsZero() { - logger.Print(MODULE, "waiting for time alignment", color.FgYellow, false) - time.Sleep(1 * time.Second) - } - - // Init MiniSEED archiving buffer - currentTime, _ := duration.Timestamp(options.Status.System.Offset) - miniSEEDBuffer := &publisher.SegmentBuffer{ - TimeStamp: currentTime, - ChannelBuffer: map[string]*publisher.ChannelSegmentBuffer{ - "EHZ": {}, "EHE": {}, "EHN": {}, - }, - } - - // Get sequence number if file exists - for i, v := range miniSEEDBuffer.ChannelBuffer { - filePath := getFilePath(basePath, station, network, location, i, currentTime) - _, err := os.Stat(filePath) - if err == nil { - // Get last sequence number - logger.Print(MODULE, fmt.Sprintf("starting %s from last record", i), color.FgYellow, false) - - // Read MiniSEED file - var ms 
mseedio.MiniSeedData - err := ms.Read(filePath) - if err != nil { - m.OnError(options, err) - return - } - - // Get last sequence number - recordLength := len(ms.Series) - if recordLength > 0 { - lastRecord := ms.Series[recordLength-1] - lastSeqNum := lastRecord.FixedSection.SequenceNumber - n, err := strconv.Atoi(lastSeqNum) - if err != nil { - m.OnError(options, err) - return - } - // Set current sequence number - v.SeqNum = int64(n) - } - } else { - // Create new file with sequence number 0 - logger.Print(MODULE, fmt.Sprintf("starting %s from a new file", i), color.FgYellow, false) - } - } - m.OnStart(options, "service has started") - - // Append and write when new message arrived - expressionForSubscribe := true - publisher.Subscribe( - &options.Status.Geophone, - &expressionForSubscribe, - func(gp *publisher.Geophone) error { - return m.handleMessage(gp, options, miniSEEDBuffer) - }, - ) - - err := fmt.Errorf("service exited with an error") - m.OnError(options, err) -} diff --git a/feature/miniseed/filepath.go b/feature/miniseed/filepath.go deleted file mode 100644 index 62c0c9f6c..000000000 --- a/feature/miniseed/filepath.go +++ /dev/null @@ -1,16 +0,0 @@ -package miniseed - -import ( - "fmt" - "time" -) - -func getFilePath(basePath, station, network, location, channel string, timestamp time.Time) string { - // e.g. 
/path/to/miniseed/AS.SHAKE.00.EHZ.D.2023.208.mseed - return fmt.Sprintf("%s/%s.%s.%s.%s.D.%s.%s.mseed", - basePath, - network, station, location, channel, - timestamp.Format("2006"), - timestamp.Format("002"), - ) -} diff --git a/feature/miniseed/message.go b/feature/miniseed/message.go deleted file mode 100644 index aa49960d8..000000000 --- a/feature/miniseed/message.go +++ /dev/null @@ -1,89 +0,0 @@ -package miniseed - -import ( - "fmt" - "math" - "time" - - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/utils/text" - "github.com/bclswl0827/mseedio" -) - -func (m *MiniSEED) handleMessage(gp *publisher.Geophone, options *feature.FeatureOptions, buffer *publisher.SegmentBuffer) error { - var ( - basePath = options.Config.MiniSEED.Path - timestamp = time.UnixMilli(gp.TS).UTC() - station = text.TruncateString(options.Config.Station.Station, 5) - network = text.TruncateString(options.Config.Station.Network, 2) - location = text.TruncateString(options.Config.Station.Location, 2) - channelMap = map[string]publisher.Int32Array{ - "EHZ": gp.EHZ, "EHE": gp.EHE, "EHN": gp.EHN, - } - ) - - // Append geophone channel data to buffer - for i, v := range buffer.ChannelBuffer { - channelData, ok := channelMap[i] - if ok { - v.DataBuffer = append(v.DataBuffer, channelData...) 
- v.Samples += int32(len(channelData)) - } - } - - // Check if buffer is ready to write to file - timeDiffSec := timestamp.Sub(buffer.TimeStamp).Seconds() - if timeDiffSec >= MAX_DURATION { - // Append channels to MiniSEED - for i, v := range buffer.ChannelBuffer { - // Init MiniSEED data - var miniseed mseedio.MiniSeedData - miniseed.Init(ENCODING_TYPE, BIT_ORDER) - // Get sequence number in string - seqNum := fmt.Sprintf("%06d", v.SeqNum) - v.SeqNum++ - // Get sample rate - sampleRate := math.Round(float64(v.Samples) / timeDiffSec) - // Append channel data - err := miniseed.Append(v.DataBuffer, &mseedio.AppendOptions{ - ChannelCode: i, - SequenceNumber: seqNum, - StationCode: station, - NetworkCode: network, - LocationCode: location, - StartTime: timestamp, - SampleRate: sampleRate, - }) - if err != nil { - m.OnError(options, err) - return err - } - // Encode record to bytes - dataBytes, err := miniseed.Encode(mseedio.APPEND, BIT_ORDER) - if err != nil { - m.OnError(options, err) - return err - } - // Append bytes to file - filePath := getFilePath(basePath, station, network, location, i, timestamp) - err = miniseed.Write(filePath, mseedio.APPEND, dataBytes) - if err != nil { - m.OnError(options, err) - return err - } - } - - // Reset buffer - m.OnReady(options, "write") - buffer.TimeStamp = timestamp - for _, v := range buffer.ChannelBuffer { - v.DataBuffer = []int32{} - v.Samples = 0 - } - } else { - m.OnReady(options, "append") - } - - return nil -} diff --git a/feature/miniseed/types.go b/feature/miniseed/types.go deleted file mode 100644 index 7ebcc900d..000000000 --- a/feature/miniseed/types.go +++ /dev/null @@ -1,15 +0,0 @@ -package miniseed - -import ( - "github.com/bclswl0827/mseedio" -) - -const MODULE string = "miniseed" - -const ( - MAX_DURATION float64 = 3.0 - BIT_ORDER int = mseedio.MSBFIRST - ENCODING_TYPE int = mseedio.STEIM2 -) - -type MiniSEED struct{} diff --git a/feature/ntpclient/callbacks.go b/feature/ntpclient/callbacks.go deleted file mode 
100644 index 809dfc062..000000000 --- a/feature/ntpclient/callbacks.go +++ /dev/null @@ -1,27 +0,0 @@ -package ntpclient - -import ( - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/duration" - "github.com/anyshake/observer/utils/logger" - "github.com/anyshake/observer/utils/text" - "github.com/fatih/color" -) - -func (n *NTPClient) OnStart(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgMagenta, false) -} - -func (n *NTPClient) OnStop(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgBlue, false) -} - -func (n *NTPClient) OnReady(options *feature.FeatureOptions, v ...any) { - options.Status.System.Offset = v[0].(float64) - options.Status.ReadyTime, _ = duration.Timestamp(options.Status.System.Offset) - logger.Print(MODULE, "time alignment succeed", color.FgGreen, false) -} - -func (n *NTPClient) OnError(options *feature.FeatureOptions, err error) { - logger.Print(MODULE, err, color.FgRed, false) -} diff --git a/feature/ntpclient/daemon.go b/feature/ntpclient/daemon.go deleted file mode 100644 index d57a40328..000000000 --- a/feature/ntpclient/daemon.go +++ /dev/null @@ -1,30 +0,0 @@ -package ntpclient - -import ( - "sync" - "time" - - "github.com/anyshake/observer/feature" -) - -func (n *NTPClient) Run(options *feature.FeatureOptions, waitGroup *sync.WaitGroup) { - var ( - host = options.Config.NTPClient.Host - port = options.Config.NTPClient.Port - timeout = options.Config.NTPClient.Timeout - interval = options.Config.NTPClient.Interval - ) - - n.OnStart(options, "service has started") - for { - result, err := n.read(host, port, timeout) - if err != nil { - n.OnError(options, err) - time.Sleep(time.Second) - continue - } - - n.OnReady(options, result) - time.Sleep(time.Duration(interval) * time.Second) - } -} diff --git a/feature/ntpclient/read.go b/feature/ntpclient/read.go deleted file mode 100644 index 0af71437e..000000000 --- 
a/feature/ntpclient/read.go +++ /dev/null @@ -1,18 +0,0 @@ -package ntpclient - -import ( - "time" - - "github.com/beevik/ntp" -) - -func (n *NTPClient) read(server string, port, timeout int) (float64, error) { - response, err := ntp.QueryWithOptions(server, ntp.QueryOptions{ - Port: port, Timeout: time.Duration(time.Duration(timeout).Seconds()), - }) - if err != nil { - return 0, err - } - - return response.ClockOffset.Seconds(), nil -} diff --git a/feature/ntpclient/types.go b/feature/ntpclient/types.go deleted file mode 100644 index 05928a5e9..000000000 --- a/feature/ntpclient/types.go +++ /dev/null @@ -1,5 +0,0 @@ -package ntpclient - -const MODULE string = "ntpclient" - -type NTPClient struct{} diff --git a/feature/seedlink/buffer.go b/feature/seedlink/buffer.go deleted file mode 100644 index f694e293c..000000000 --- a/feature/seedlink/buffer.go +++ /dev/null @@ -1,37 +0,0 @@ -package seedlink - -import ( - "time" - - "github.com/anyshake/observer/driver/seedlink" - "github.com/anyshake/observer/publisher" - "github.com/ostafen/clover/v2/document" - "github.com/ostafen/clover/v2/query" -) - -func (s *SeedLink) handleBuffer(gp *publisher.Geophone, buffer *seedlink.SeedLinkBuffer) error { - currentTime := time.UnixMilli(gp.TS).UTC() - if currentTime.Minute()%10 == 0 && currentTime.Second() == 0 { - expireThreshold := currentTime.Add(-buffer.Duration).UnixMilli() - buffer.Database.Delete(query.NewQuery(buffer.Collection).Where(query.Field("ts").Lt(expireThreshold))) - } - - var ( - ehz, _ = gp.EHZ.Value() - ehe, _ = gp.EHE.Value() - ehn, _ = gp.EHN.Value() - ) - doc := document.NewDocument() - doc.Set("ehz", ehz.(string)) - doc.Set("ehe", ehe.(string)) - doc.Set("ehn", ehn.(string)) - doc.Set("ts", gp.TS) - - _, err := buffer.Database.InsertOne(buffer.Collection, doc) - if err != nil { - return err - } - - s.OnReady(nil, "1 record added to buffer") - return nil -} diff --git a/feature/seedlink/callbacks.go b/feature/seedlink/callbacks.go deleted file mode 100644 
index f0b4d800e..000000000 --- a/feature/seedlink/callbacks.go +++ /dev/null @@ -1,24 +0,0 @@ -package seedlink - -import ( - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/utils/logger" - "github.com/anyshake/observer/utils/text" - "github.com/fatih/color" -) - -func (s *SeedLink) OnStart(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgMagenta, false) -} - -func (s *SeedLink) OnStop(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgBlue, false) -} - -func (s *SeedLink) OnReady(options *feature.FeatureOptions, v ...any) { - logger.Print(MODULE, text.Concat(v...), color.FgGreen, false) -} - -func (s *SeedLink) OnError(options *feature.FeatureOptions, err error) { - logger.Print(MODULE, err, color.FgRed, false) -} diff --git a/feature/seedlink/command.go b/feature/seedlink/command.go deleted file mode 100644 index 7bd0c1b43..000000000 --- a/feature/seedlink/command.go +++ /dev/null @@ -1,106 +0,0 @@ -package seedlink - -import ( - "bufio" - "fmt" - "net" - "strings" - - "github.com/anyshake/observer/driver/seedlink" - "github.com/anyshake/observer/feature" -) - -func (s *SeedLink) handleCommand(options *feature.FeatureOptions, slGlobal *seedlink.SeedLinkGlobal, slClient *seedlink.SeedLinkClient, conn net.Conn) { - // Builtin seedlink list of SeedLink Protocol - SeedLinkCommands := map[string]seedlink.SeedLinkCommand{ - "END": {HasExtraArgs: false, SeedLinkCommandCallback: &seedlink.END{}}, - "DATA": {HasExtraArgs: true, SeedLinkCommandCallback: &seedlink.DATA{}}, - "TIME": {HasExtraArgs: true, SeedLinkCommandCallback: &seedlink.TIME{}}, - "INFO": {HasExtraArgs: true, SeedLinkCommandCallback: &seedlink.INFO{}}, - "HELLO": {HasExtraArgs: false, SeedLinkCommandCallback: &seedlink.HELLO{}}, - "SELECT": {HasExtraArgs: true, SeedLinkCommandCallback: &seedlink.SELECT{}}, - "STATION": {HasExtraArgs: true, SeedLinkCommandCallback: &seedlink.STATION{}}, - 
"CAPABILITIES": {HasExtraArgs: true, SeedLinkCommandCallback: &seedlink.CAPABILITIES{}}, - } - - // Dispatch connection events - s.OnStart(options, "user connected ", conn.RemoteAddr().String()) - defer s.OnStop(options, "user disconnected ", conn.RemoteAddr().String()) - - // Create a new reader - reader := bufio.NewReader(conn) - defer conn.Close() - - for { - // Read client message - clientMessage, err := reader.ReadString('\r') - if err != nil { - return - } else { - // Remove '\n' & '\r' from message and convert to uppercase - trimmedMessage := strings.ReplaceAll(clientMessage, "\n", "") - clientMessage = strings.ToUpper(strings.TrimSuffix(trimmedMessage, "\r")) - } - - // Ignore empty message - if len(clientMessage) == 0 { - continue - } - - // Disconnect if BYE received - if clientMessage == "BYE" { - return - } - - // Exit from stream mode - if clientMessage != "END" && - // An exception for INFO command - !strings.Contains(clientMessage, "INFO ") { - slClient.Streaming = false - } - - // Check if command is whitelisted - var ( - isCommandValid = true - argumentList = strings.Split(clientMessage, " ") - mainArgument = argumentList[0] - ) - - // Get command details from command list - cmd, ok := SeedLinkCommands[mainArgument] - if !ok { - isCommandValid = false - } - - // Send error if command is invalid - if !isCommandValid { - conn.Write([]byte(seedlink.RES_ERR)) - s.OnError(options, fmt.Errorf("RECV ERR: %s <%s>", conn.RemoteAddr().String(), clientMessage)) - } else { - s.OnReady(options, fmt.Sprintf("RECV OK: %s <%s>", conn.RemoteAddr().String(), clientMessage)) - } - - // Check for extra arguments - if isCommandValid && cmd.HasExtraArgs { - if len(argumentList) == 0 { - conn.Write([]byte(seedlink.RES_ERR)) - } else { - args := argumentList[1:] - for i := 0; i < len(args); i++ { - if len(args[i]) == 0 { - args = append(args[:i], args[i+1:]...) - } - } - err = cmd.Callback(slGlobal, slClient, options, s.handleMessage, conn, args...) 
- if err != nil { - cmd.Fallback(slGlobal, slClient, options, conn, args...) - } - } - } else if isCommandValid { - err = cmd.Callback(slGlobal, slClient, options, s.handleMessage, conn) - if err != nil { - cmd.Fallback(slGlobal, slClient, options, conn) - } - } - } -} diff --git a/feature/seedlink/daemon.go b/feature/seedlink/daemon.go deleted file mode 100644 index 84e3b9a55..000000000 --- a/feature/seedlink/daemon.go +++ /dev/null @@ -1,83 +0,0 @@ -package seedlink - -import ( - "fmt" - "net" - "os" - "os/signal" - "sync" - "syscall" - "time" - - "github.com/anyshake/observer/driver/seedlink" - "github.com/anyshake/observer/feature" - "github.com/anyshake/observer/publisher" - "github.com/anyshake/observer/utils/logger" - "github.com/anyshake/observer/utils/text" - "github.com/fatih/color" -) - -func (s *SeedLink) Run(options *feature.FeatureOptions, waitGroup *sync.WaitGroup) { - if !options.Config.SeedLink.Enable { - s.OnStop(options, "service is disabled") - return - } - - // Increase wait group counter - waitGroup.Add(1) - defer waitGroup.Done() - - // Create TCP server and listen - host, port := options.Config.SeedLink.Host, options.Config.SeedLink.Port - listener, err := net.Listen("tcp", fmt.Sprintf("%s:%d", host, port)) - if err != nil { - s.OnError(options, err) - os.Exit(1) - } - defer listener.Close() - - // Init SeedLink global state - var ( - slGlobal seedlink.SeedLinkGlobal - station = text.TruncateString(options.Config.Station.Station, 5) - network = text.TruncateString(options.Config.Station.Network, 2) - location = text.TruncateString(options.Config.Station.Location, 2) - bufferDuration = options.Config.SeedLink.Duration - currentLocalTime = time.Now().UTC() - ) - err = s.InitGlobal(&slGlobal, currentLocalTime, station, network, location, bufferDuration) - if err != nil { - s.OnError(options, err) - return - } - defer slGlobal.SeedLinkBuffer.Database.Close() - - // Accept incoming connections - s.OnStart(options, "service has started") - go 
func() { - for { - conn, err := listener.Accept() - if err != nil { - continue - } - // Handle seedlink from client - var slClient seedlink.SeedLinkClient - s.InitClient(&slClient) - go s.handleCommand(options, &slGlobal, &slClient, conn) - } - }() - - // Subscribe to publisher to append buffer - expressionForSubscribe := true - go publisher.Subscribe(&options.Status.Geophone, &expressionForSubscribe, func(gp *publisher.Geophone) error { - return s.handleBuffer(gp, &slGlobal.SeedLinkBuffer) - }) - - // Receive interrupt signals - sigCh := make(chan os.Signal, 1) - signal.Notify(sigCh, os.Interrupt, syscall.SIGTERM) - - // Wait for interrupt signals - <-sigCh - logger.Print(MODULE, "closing buffer area", color.FgBlue, true) -} diff --git a/feature/seedlink/init.go b/feature/seedlink/init.go deleted file mode 100644 index 93eab5b9a..000000000 --- a/feature/seedlink/init.go +++ /dev/null @@ -1,74 +0,0 @@ -package seedlink - -import ( - "fmt" - "log" - "time" - - "github.com/anyshake/observer/driver/seedlink" - "github.com/dgraph-io/badger/v4" - c "github.com/ostafen/clover/v2" - badgerstore "github.com/ostafen/clover/v2/store/badger" -) - -func (s *SeedLink) InitClient(slClient *seedlink.SeedLinkClient) { - slClient.Streaming = false -} - -func (s *SeedLink) InitGlobal(slGlobal *seedlink.SeedLinkGlobal, currentTime time.Time, station, network, location string, bufferDuration int) error { - var ( - streamEndTimeString = "9999-12-31 23:59:59" - currentTimeString = currentTime.Format("2006-01-02 15:04:01") - ) - - // Initialize SeedLink global states - slGlobal.SeedLinkState = seedlink.SeedLinkState{ - Organization: seedlink.ORGANIZATION, - StartTime: currentTimeString, - Software: seedlink.RELEASE, - } - slGlobal.Capabilities = []seedlink.SeedLinkCapability{ - {Name: "info:all"}, {Name: "info:gaps"}, {Name: "info:streams"}, - {Name: "dialup"}, {Name: "info:id"}, {Name: "multistation"}, - {Name: "window-extraction"}, {Name: "info:connections"}, - {Name: 
"info:capabilities"}, {Name: "info:stations"}, - } - - // Station field are not used by SeedLink, but are required by the protocol to differentiate between stations - slGlobal.Streams = []seedlink.SeedLinkStream{ - {BeginTime: currentTimeString, EndTime: streamEndTimeString, SeedName: "EHZ", Location: location, Type: "D", Station: station}, - {BeginTime: currentTimeString, EndTime: streamEndTimeString, SeedName: "EHE", Location: location, Type: "D", Station: station}, - {BeginTime: currentTimeString, EndTime: streamEndTimeString, SeedName: "EHN", Location: location, Type: "D", Station: station}, - } - slGlobal.Stations = []seedlink.SeedLinkStation{ - {BeginSequence: "000000", EndSequence: "FFFFFF", Station: station, Network: network, Description: fmt.Sprintf("%s station", network)}, - } - - // Create buffer store - const collectionName = "observer" - badgerstoreOptions := badger.DefaultOptions("").WithInMemory(true) - badgerstoreOptions.Logger = nil - store, err := badgerstore.OpenWithOptions(badgerstoreOptions) - if err != nil { - return err - } - db, err := c.OpenWithStore(store) - if err != nil { - return err - } - - // Create collection - collectionExists, err := db.HasCollection(collectionName) - if err != nil { - log.Fatalln(err) - } - if !collectionExists { - db.CreateCollection(collectionName) - } - - // Initialize ring buffer - duration := time.Duration(bufferDuration) * time.Second - slGlobal.SeedLinkBuffer = seedlink.SeedLinkBuffer{Collection: collectionName, Duration: duration, Database: db} - - return nil -} diff --git a/feature/seedlink/message.go b/feature/seedlink/message.go deleted file mode 100644 index 518c6ebb8..000000000 --- a/feature/seedlink/message.go +++ /dev/null @@ -1,44 +0,0 @@ -package seedlink - -import ( - "fmt" - "net" - - "github.com/anyshake/observer/driver/seedlink" - "github.com/anyshake/observer/publisher" -) - -func (s *SeedLink) handleMessage(conn net.Conn, client *seedlink.SeedLinkClient, gp *publisher.Geophone) error { - if 
len(client.Channels) == 0 { - return fmt.Errorf("no channels selected") - } - - var ( - ts = gp.TS - ehz = gp.EHZ - ehe = gp.EHE - ehn = gp.EHN - chMap = map[string]publisher.Int32Array{ - "EHZ": ehz, "EHE": ehe, "EHN": ehn, - } - ) - - for _, channel := range client.Channels { - countData, ok := chMap[channel] - if !ok { - conn.Write([]byte(seedlink.RES_ERR)) - err := fmt.Errorf("channel %s not found", channel) - return err - } - - err := seedlink.SendSLPacket(conn, client, seedlink.SeedLinkPacket{ - Channel: channel, Timestamp: ts, Count: countData, - }) - if err != nil { - return err - } - } - - s.OnReady(nil, "SENT OK: ", conn.RemoteAddr().String()) - return nil -} diff --git a/feature/seedlink/types.go b/feature/seedlink/types.go deleted file mode 100644 index 0dc3c22a5..000000000 --- a/feature/seedlink/types.go +++ /dev/null @@ -1,5 +0,0 @@ -package seedlink - -const MODULE string = "seedlink" - -type SeedLink struct{} diff --git a/feature/types.go b/feature/types.go deleted file mode 100644 index 511612da8..000000000 --- a/feature/types.go +++ /dev/null @@ -1,23 +0,0 @@ -package feature - -import ( - "sync" - - "github.com/anyshake/observer/config" - "github.com/anyshake/observer/publisher" - "gorm.io/gorm" -) - -type Feature interface { - Run(*FeatureOptions, *sync.WaitGroup) - OnStart(*FeatureOptions, ...any) - OnStop(*FeatureOptions, ...any) - OnReady(*FeatureOptions, ...any) - OnError(*FeatureOptions, error) -} - -type FeatureOptions struct { - Database *gorm.DB - Config *config.Conf - Status *publisher.Status -} diff --git a/frontend/src/.env b/frontend/src/.env index 445ae53cd..f48e3e791 100644 --- a/frontend/src/.env +++ b/frontend/src/.env @@ -1,2 +1,2 @@ -REACT_APP_VERSION=v2.12.4 -REACT_APP_RELEASE=1fe3bd65-20240505023901 +REACT_APP_VERSION=v3.0.0 +REACT_APP_RELEASE=b2150f37-20240808023110 diff --git a/frontend/src/.eslintrc b/frontend/src/.eslintrc index 1a2a56d20..32d858637 100644 --- a/frontend/src/.eslintrc +++ b/frontend/src/.eslintrc @@ -21,8 
+21,7 @@ "linebreak-style": ["error", "unix"], "no-console": "warn", "no-fallthrough": "error", - "no-octal": "error", - "camelcase": "error" + "no-octal": "error" }, "globals": { "NodeJS": true diff --git a/frontend/src/package-lock.json b/frontend/src/package-lock.json index b86fa50a4..6eacecc66 100644 --- a/frontend/src/package-lock.json +++ b/frontend/src/package-lock.json @@ -9,6 +9,8 @@ "dependencies": { "@emotion/react": "^11.11.4", "@emotion/styled": "^11.11.0", + "@mdi/js": "^7.4.47", + "@mdi/react": "^1.6.1", "@mui/material": "^5.14.14", "@mui/x-date-pickers": "^6.16.0", "@reduxjs/toolkit": "^1.9.6", @@ -23,6 +25,7 @@ "oregondsp": "^1.3.1", "react": "^18.2.0", "react-dom": "^18.2.0", + "react-error-boundary": "^4.0.13", "react-hot-toast": "^2.4.1", "react-i18next": "^13.2.2", "react-leaflet": "^4.2.1", @@ -34,7 +37,6 @@ }, "devDependencies": { "@babel/plugin-proposal-private-property-in-object": "^7.21.11", - "@craco/craco": "^7.1.0", "@types/file-saver": "^2.0.7", "@types/leaflet": "^1.9.6", "@types/node": "^16.18.38", @@ -2197,35 +2199,13 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, - "node_modules/@craco/craco": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@craco/craco/-/craco-7.1.0.tgz", - "integrity": "sha512-oRAcPIKYrfPXp9rSzlsDNeOaVtDiKhoyqSXUoqiK24jCkHr4T8m/a2f74yXIzCbIheoUWDOIfWZyRgFgT+cpqA==", - "dev": true, - "dependencies": { - "autoprefixer": "^10.4.12", - "cosmiconfig": "^7.0.1", - "cosmiconfig-typescript-loader": "^1.0.0", - "cross-spawn": "^7.0.3", - "lodash": "^4.17.21", - "semver": "^7.3.7", - "webpack-merge": "^5.8.0" - }, - "bin": { - "craco": "dist/bin/craco.js" - }, - "engines": { - "node": ">=6" - }, - "peerDependencies": { - "react-scripts": "^5.0.0" - } - }, "node_modules/@cspotcode/source-map-support": { "version": "0.8.1", "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", 
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "@jridgewell/trace-mapping": "0.3.9" }, @@ -2238,6 +2218,8 @@ "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz", "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "@jridgewell/resolve-uri": "^3.0.3", "@jridgewell/sourcemap-codec": "^1.4.10" @@ -3729,6 +3711,19 @@ "integrity": "sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==", "dev": true }, + "node_modules/@mdi/js": { + "version": "7.4.47", + "resolved": "https://registry.npmmirror.com/@mdi/js/-/js-7.4.47.tgz", + "integrity": "sha512-KPnNOtm5i2pMabqZxpUz7iQf+mfrYZyKCZ8QNz85czgEt7cuHcGorWfdzUMWYA0SD+a6Hn4FmJ+YhzzzjkTZrQ==" + }, + "node_modules/@mdi/react": { + "version": "1.6.1", + "resolved": "https://registry.npmmirror.com/@mdi/react/-/react-1.6.1.tgz", + "integrity": "sha512-4qZeDcluDFGFTWkHs86VOlHkm6gnKaMql13/gpIcUQ8kzxHgpj31NuCkD8abECVfbULJ3shc7Yt4HJ6Wu6SN4w==", + "dependencies": { + "prop-types": "^15.7.2" + } + }, "node_modules/@mui/base": { "version": "5.0.0-beta.40", "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.40.tgz", @@ -4564,25 +4559,33 @@ "version": "1.0.9", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz", "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/@tsconfig/node12": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz", "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==", - "dev": true + "dev": true, + 
"optional": true, + "peer": true }, "node_modules/@tsconfig/node14": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz", "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/@tsconfig/node16": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz", "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/@types/babel__core": { "version": "7.20.5", @@ -6779,20 +6782,6 @@ "wrap-ansi": "^7.0.0" } }, - "node_modules/clone-deep": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", - "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", - "dev": true, - "dependencies": { - "is-plain-object": "^2.0.4", - "kind-of": "^6.0.2", - "shallow-clone": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/clsx": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", @@ -7076,30 +7065,13 @@ "node": ">=10" } }, - "node_modules/cosmiconfig-typescript-loader": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-1.0.9.tgz", - "integrity": "sha512-tRuMRhxN4m1Y8hP9SNYfz7jRwt8lZdWxdjg/ohg5esKmsndJIn4yT96oJVcf5x0eA11taXl+sIp+ielu529k6g==", - "dev": true, - "dependencies": { - "cosmiconfig": "^7", - "ts-node": "^10.7.0" - }, - "engines": { - "node": ">=12", - "npm": ">=6" - }, - "peerDependencies": { - "@types/node": "*", - "cosmiconfig": ">=7", - "typescript": ">=3" - } - }, "node_modules/create-require": { "version": "1.1.1", "resolved": 
"https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/cross-env": { "version": "7.0.3", @@ -7797,6 +7769,8 @@ "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">=0.3.1" } @@ -9384,15 +9358,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/flat": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", - "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "dev": true, - "bin": { - "flat": "cli.js" - } - }, "node_modules/flat-cache": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", @@ -10947,18 +10912,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-plain-object": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", - "dev": true, - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/is-potential-custom-element-name": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz", @@ -11144,15 +11097,6 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, - "node_modules/isobject": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", - "integrity": 
"sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/istanbul-lib-coverage": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", @@ -13872,7 +13816,9 @@ "version": "1.3.6", "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/makeerror": { "version": "1.0.12", @@ -16644,6 +16590,17 @@ "react": "^18.2.0" } }, + "node_modules/react-error-boundary": { + "version": "4.0.13", + "resolved": "https://registry.npmmirror.com/react-error-boundary/-/react-error-boundary-4.0.13.tgz", + "integrity": "sha512-b6PwbdSv8XeOSYvjt8LpgpKrZ0yGdtZokYwkwV2wlcZbxgopHX/hgPl5VgpnoVOWd868n1hktM8Qm4b+02MiLQ==", + "dependencies": { + "@babel/runtime": "^7.12.5" + }, + "peerDependencies": { + "react": ">=16.13.1" + } + }, "node_modules/react-error-overlay": { "version": "6.0.11", "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz", @@ -17779,18 +17736,6 @@ "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", "dev": true }, - "node_modules/shallow-clone": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", - "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", - "dev": true, - "dependencies": { - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -18921,6 +18866,8 @@ "resolved": 
"https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "@cspotcode/source-map-support": "^0.8.0", "@tsconfig/node10": "^1.0.7", @@ -18964,6 +18911,8 @@ "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">=0.4.0" } @@ -18972,7 +18921,9 @@ "version": "4.1.3", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/tsconfig-paths": { "version": "3.15.0", @@ -19388,7 +19339,9 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/v8-to-istanbul": { "version": "8.1.1", @@ -19783,20 +19736,6 @@ "node": ">=10.13.0" } }, - "node_modules/webpack-merge": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.10.0.tgz", - "integrity": "sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==", - "dev": true, - "dependencies": { - "clone-deep": "^4.0.1", - "flat": "^5.0.2", - "wildcard": "^2.0.0" - }, - "engines": { - "node": ">=10.0.0" - } - }, "node_modules/webpack-sources": { "version": "3.2.3", "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", @@ -19989,12 +19928,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - 
"node_modules/wildcard": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.1.tgz", - "integrity": "sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==", - "dev": true - }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", @@ -20542,6 +20475,8 @@ "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">=6" } diff --git a/frontend/src/package.json b/frontend/src/package.json index 797b53149..0a97995d8 100644 --- a/frontend/src/package.json +++ b/frontend/src/package.json @@ -5,6 +5,8 @@ "dependencies": { "@emotion/react": "^11.11.4", "@emotion/styled": "^11.11.0", + "@mdi/js": "^7.4.47", + "@mdi/react": "^1.6.1", "@mui/material": "^5.14.14", "@mui/x-date-pickers": "^6.16.0", "@reduxjs/toolkit": "^1.9.6", @@ -19,6 +21,7 @@ "oregondsp": "^1.3.1", "react": "^18.2.0", "react-dom": "^18.2.0", + "react-error-boundary": "^4.0.13", "react-hot-toast": "^2.4.1", "react-i18next": "^13.2.2", "react-leaflet": "^4.2.1", @@ -63,4 +66,4 @@ "tailwindcss-animated": "^1.0.1", "typescript": "^4.9.5" } -} \ No newline at end of file +} diff --git a/frontend/src/src/App.tsx b/frontend/src/src/App.tsx index 96dd56af0..a99dd78ed 100644 --- a/frontend/src/src/App.tsx +++ b/frontend/src/src/App.tsx @@ -17,6 +17,7 @@ import i18n, { i18nConfig } from "./config/i18n"; import { menuConfig } from "./config/menu"; import { routerConfig } from "./config/router"; import { getAsciiArt } from "./helpers/app/getAsciiArt"; +import { hideLoading } from "./helpers/app/hideLoading"; import { getCurrentLocale } from "./helpers/i18n/getCurrentLocale"; import { setUserLocale } from "./helpers/i18n/setUserLocale"; import { requestRestApi } from 
"./helpers/request/requestRestApi"; @@ -31,7 +32,7 @@ const App = () => { const { name, title, author, repository, homepage, footer, version, release } = globalConfig; useEffect(() => { - document.querySelector(".public-loading")?.remove(); + hideLoading(); const asciiArt = getAsciiArt(); // eslint-disable-next-line no-console console.info(`%c${asciiArt}`, "color: #0891b2;"); @@ -76,12 +77,12 @@ const App = () => { }); if (res?.data) { const initialized = true; - const { sensitivity, frequency } = res.data.geophone; + const { sensitivity, frequency } = res.data.sensor; dispatch(UpdateGeophone({ sensitivity, frequency, initialized })); - const { resolution, fullscale } = res.data.adc; + const { resolution, fullscale } = res.data.sensor; dispatch(UpdateADC({ resolution, fullscale, initialized })); - const { station, network, location } = res.data.station; - dispatch(UpdateStation({ station, network, location, initialized })); + const { station, network, location, channel } = res.data.stream; + dispatch(UpdateStation({ station, network, location, initialized, channel })); } }, [dispatch]); diff --git a/frontend/src/src/Error.tsx b/frontend/src/src/Error.tsx new file mode 100644 index 000000000..17abb921f --- /dev/null +++ b/frontend/src/src/Error.tsx @@ -0,0 +1,71 @@ +import { mdiBugPause } from "@mdi/js"; +import Icon from "@mdi/react"; +import { useEffect, useState } from "react"; + +import { Code } from "./components/Code"; +import { hideLoading } from "./helpers/app/hideLoading"; + +interface ErrorProps { + readonly code?: number; + readonly heading?: string; + readonly content?: string; + readonly action?: { + readonly label: string; + readonly onClick: () => void; + }; + readonly debug?: string; +} + +export const Error = ({ code, heading, content, action, debug }: ErrorProps) => { + const [isDebug, setIsDebug] = useState(false); + + useEffect(() => { + hideLoading(); + }, []); + + return ( +
+

+ {code ?? ":-("} +

+

+ {heading ?? "Oops! Something went wrong."} +

+

+ {content ?? "Please try to refresh the page or come back later."} +

+ + {action && ( + + )} + + {debug && ( + + )} + + {debug && isDebug && ( +
+ + {debug} + +
+ )} +
+ ); +}; diff --git a/frontend/src/src/helpers/app/getProtocol.tsx b/frontend/src/src/helpers/app/getProtocol.tsx index 40b5c5c75..551c6d62d 100644 --- a/frontend/src/src/helpers/app/getProtocol.tsx +++ b/frontend/src/src/helpers/app/getProtocol.tsx @@ -6,7 +6,7 @@ export const getProtocol = (http: boolean) => { return window.location.protocol === "https:" ? "wss:" : "ws:"; } if (http) { - return process.env.REACT_APP_BACKEND_TLS ? "https:" : "http:"; + return process.env.REACT_APP_BACKEND_TLS === "true" ? "https:" : "http:"; } - return process.env.REACT_APP_BACKEND_TLS ? "wss:" : "ws:"; + return process.env.REACT_APP_BACKEND_TLS === "true" ? "wss:" : "ws:"; }; diff --git a/frontend/src/src/helpers/app/hideLoading.tsx b/frontend/src/src/helpers/app/hideLoading.tsx new file mode 100644 index 000000000..5d0939747 --- /dev/null +++ b/frontend/src/src/helpers/app/hideLoading.tsx @@ -0,0 +1,9 @@ +export const hideLoading = () => { + const loadingScreen = document.querySelector(".public-loading") as HTMLElement; + if (loadingScreen) { + loadingScreen.style.opacity = "0"; + setTimeout(() => { + document.querySelector(".public-loading")?.remove(); + }, 500); + } +}; diff --git a/frontend/src/src/index.tsx b/frontend/src/src/index.tsx index 422a1dc23..70fc9529b 100644 --- a/frontend/src/src/index.tsx +++ b/frontend/src/src/index.tsx @@ -3,6 +3,7 @@ import "./index.css"; import { StyledEngineProvider } from "@mui/material/styles"; import ReactDOM from "react-dom/client"; +import { ErrorBoundary } from "react-error-boundary"; import { Provider } from "react-redux"; import { PersistGate } from "redux-persist/integration/react"; @@ -10,16 +11,19 @@ import App from "./App"; import { RouterWrapper } from "./components/RouterWrapper"; import { routerConfig } from "./config/router"; import store, { REDUX_PRESIST } from "./config/store"; +import { Error } from "./Error"; const root = ReactDOM.createRoot(document.getElementById("root")!); root.render( - - - - - - - - - + }> + + + + + 
+ + + + + ); diff --git a/frontend/src/src/locales/en-US.json b/frontend/src/src/locales/en-US.json index bf93bf6cc..fc5aaa448 100644 --- a/frontend/src/src/locales/en-US.json +++ b/frontend/src/src/locales/en-US.json @@ -44,20 +44,8 @@ "label": "Frame Errors", "unit": "errors" }, - "pushed": { - "label": "Pushed Messages", - "unit": "messages" - }, - "failures": { - "label": "Push Failures", - "unit": "failures" - }, - "queued": { - "label": "Queued Messages", - "unit": "messages" - }, - "offset": { - "label": "System Time Offset", + "elapsed": { + "label": "Explorer Connected", "unit": "seconds" } } @@ -78,8 +66,8 @@ } }, "charts": { - "ehz": { - "label": "EHZ Channel Waveform Count", + "z_axis": { + "label": "{{channel}}Z Channel Waveform Count", "text": "PGA: {{ pga }} gal\nPGV: {{ pgv }} kine\nIntensity: {{ intensity }}", "advanced": { "panels": { @@ -93,8 +81,8 @@ } } }, - "ehe": { - "label": "EHE Channel Waveform Count", + "e_axis": { + "label": "{{channel}}E Channel Waveform Count", "text": "PGA: {{ pga }} gal\nPGV: {{ pgv }} kine\nIntensity: {{ intensity }}", "advanced": { "panels": { @@ -108,8 +96,8 @@ } } }, - "ehn": { - "label": "EHN Channel Waveform Count", + "n_axis": { + "label": "{{channel}}N Channel Waveform Count", "text": "PGA: {{ pga }} gal\nPGV: {{ pgv }} kine\nIntensity: {{ intensity }}", "advanced": { "panels": { @@ -150,8 +138,8 @@ } }, "charts": { - "ehz": { - "label": "EHZ Channel Waveform Count", + "z_axis": { + "label": "{{channel}}Z Channel Waveform Count", "text": "Click to setup filter", "advanced": { "panels": { @@ -165,8 +153,8 @@ } } }, - "ehe": { - "label": "EHE Channel Waveform Count", + "e_axis": { + "label": "{{channel}}E Channel Waveform Count", "text": "Click to setup filter", "advanced": { "panels": { @@ -180,8 +168,8 @@ } } }, - "ehn": { - "label": "EHN Channel Waveform Count", + "n_axis": { + "label": "{{channel}}N Channel Waveform Count", "text": "Click to setup filter", "advanced": { "panels": { @@ -207,16 +195,16 @@ 
"get_share_link": "Get Share Link" }, "labels": { - "ehz_detail": { - "label": "EHZ Details", + "z_axis_detail": { + "label": "{{channel}}Z Details", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" }, - "ehe_detail": { - "label": "EHE Details", + "e_axis_detail": { + "label": "{{channel}}E Details", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" }, - "ehn_detail": { - "label": "EHN Details", + "n_axis_detail": { + "label": "{{channel}}N Details", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" } }, diff --git a/frontend/src/src/locales/zh-CN.json b/frontend/src/src/locales/zh-CN.json index cc304570f..9b5858428 100644 --- a/frontend/src/src/locales/zh-CN.json +++ b/frontend/src/src/locales/zh-CN.json @@ -44,20 +44,8 @@ "label": "帧错误讯息量", "unit": "条" }, - "pushed": { - "label": "已推送讯息量", - "unit": "条" - }, - "failures": { - "label": "推送失败讯息量", - "unit": "条" - }, - "queued": { - "label": "等待推送讯息量", - "unit": "条" - }, - "offset": { - "label": "系统时间偏移量", + "elapsed": { + "label": "Explorer 连线时间", "unit": "秒" } } @@ -78,8 +66,8 @@ } }, "charts": { - "ehz": { - "label": "EHZ 通道波形", + "z_axis": { + "label": "{{channel}}Z 通道波形", "text": "PGA {{ pga }} gal\nPGV {{ pgv }} kine\n烈度 {{ intensity }}", "advanced": { "panels": { @@ -93,8 +81,8 @@ } } }, - "ehe": { - "label": "EHE 通道波形", + "e_axis": { + "label": "{{channel}}E 通道波形", "text": "PGA {{ pga }} gal\nPGV {{ pgv }} kine\n烈度 {{ intensity }}", "advanced": { "panels": { @@ -108,8 +96,8 @@ } } }, - "ehn": { - "label": "EHN 通道波形", + "n_axis": { + "label": "{{channel}}N 通道波形", "text": "PGA {{ pga }} gal\nPGV {{ pgv }} kine\n烈度 {{ intensity }}", "advanced": { "panels": { @@ -150,8 +138,8 @@ } }, "charts": { - "ehz": { - "label": "EHZ 通道波形", + "z_axis": { + "label": "{{channel}}Z 通道波形", "text": "点击下方按钮设定滤波器", "advanced": { "panels": { @@ -165,8 +153,8 @@ } } }, - "ehe": { - "label": "EHE 通道波形", + "e_axis": { + "label": "{{channel}}E 通道波形", "text": "点击下方按钮设定滤波器", "advanced": { "panels": { @@ -180,8 +168,8 @@ } } }, - 
"ehn": { - "label": "EHN 通道波形", + "n_axis": { + "label": "{{channel}}N 通道波形", "text": "点击下方按钮设定滤波器", "advanced": { "panels": { @@ -207,16 +195,16 @@ "get_share_link": "分享链接" }, "labels": { - "ehz_detail": { - "label": "EHZ 通道详情", + "z_axis_detail": { + "label": "{{channel}}Z 通道详情", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" }, - "ehe_detail": { - "label": "EHE 通道详情", + "e_axis_detail": { + "label": "{{channel}}E 通道详情", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" }, - "ehn_detail": { - "label": "EHN 通道详情", + "n_axis_detail": { + "label": "{{channel}}N 通道详情", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" } }, diff --git a/frontend/src/src/locales/zh-TW.json b/frontend/src/src/locales/zh-TW.json index bed5b74dc..922c4da6d 100644 --- a/frontend/src/src/locales/zh-TW.json +++ b/frontend/src/src/locales/zh-TW.json @@ -56,8 +56,8 @@ "label": "等待推送訊息量", "unit": "筆" }, - "offset": { - "label": "系統時間偏移量", + "elapsed": { + "label": "Explorer 連線時間", "unit": "秒" } } @@ -78,8 +78,8 @@ } }, "charts": { - "ehz": { - "label": "EHZ 通道波形", + "z_axis": { + "label": "{{channel}}Z 通道波形", "text": "PGA {{ pga }} gal\nPGV {{ pgv }} kine\n烈度 {{ intensity }}", "advanced": { "panels": { @@ -93,8 +93,8 @@ } } }, - "ehe": { - "label": "EHE 通道波形", + "e_axis": { + "label": "{{channel}}E 通道波形", "text": "PGA {{ pga }} gal\nPGV {{ pgv }} kine\n烈度 {{ intensity }}", "advanced": { "panels": { @@ -108,8 +108,8 @@ } } }, - "ehn": { - "label": "EHN 通道波形", + "n_axis": { + "label": "{{channel}}N 通道波形", "text": "PGA {{ pga }} gal\nPGV {{ pgv }} kine\n烈度 {{ intensity }}", "advanced": { "panels": { @@ -150,8 +150,8 @@ } }, "charts": { - "ehz": { - "label": "EHZ 通道波形", + "z_axis": { + "label": "{{channel}}Z 通道波形", "text": "點擊下方按鈕設定濾波器", "advanced": { "panels": { @@ -165,8 +165,8 @@ } } }, - "ehe": { - "label": "EHE 通道波形", + "e_axis": { + "label": "{{channel}}E 通道波形", "text": "點擊下方按鈕設定濾波器", "advanced": { "panels": { @@ -180,8 +180,8 @@ } } }, - "ehn": { - "label": "EHN 通道波形", + "n_axis": { 
+ "label": "{{channel}}N 通道波形", "text": "點擊下方按鈕設定濾波器", "advanced": { "panels": { @@ -207,16 +207,16 @@ "get_share_link": "分享連結" }, "labels": { - "ehz_detail": { - "label": "EHZ 通道資訊", + "z_axis_detail": { + "label": "{{channel}}Z 通道資訊", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" }, - "ehe_detail": { - "label": "EHE 通道資訊", + "e_axis_detail": { + "label": "{{channel}}E 通道資訊", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" }, - "ehn_detail": { - "label": "EHN 通道資訊", + "n_axis_detail": { + "label": "{{channel}}N 通道資訊", "value": "PGA: {{ pga }} gal / PGV: {{ pgv }} kine" } }, diff --git a/frontend/src/src/models/request/history/0.json b/frontend/src/src/models/request/history/0.json index 435b8da99..bebdb1a01 100644 --- a/frontend/src/src/models/request/history/0.json +++ b/frontend/src/src/models/request/history/0.json @@ -1,6 +1,6 @@ { - "start": 0, - "end": 0, + "start_time": 0, + "end_time": 0, "format": "json", "channel": "*" } \ No newline at end of file diff --git a/frontend/src/src/models/request/mseed/0.json b/frontend/src/src/models/request/mseed/0.json index 30ed3aa63..91315f9a0 100644 --- a/frontend/src/src/models/request/mseed/0.json +++ b/frontend/src/src/models/request/mseed/0.json @@ -1,3 +1,3 @@ { - "action": "list" + "action": "show" } \ No newline at end of file diff --git a/frontend/src/src/models/response/common/history/0.json b/frontend/src/src/models/response/common/history/0.json index 91d967cdf..2db974bd2 100644 --- a/frontend/src/src/models/response/common/history/0.json +++ b/frontend/src/src/models/response/common/history/0.json @@ -1,19 +1,33 @@ { - "time": "1970-01-01T23:59:59Z", + "time": "2024-08-07T13:57:15Z", "status": 200, "error": false, "path": "/api/v1/history", "message": "The waveform data was successfully filtered", "data": [ { - "ts": 0, - "ehz": [ + "sample_rate": 125, + "timestamp": 1723039025571, + "z_axis": [ 0 ], - "ehe": [ + "e_axis": [ 0 ], - "ehn": [ + "n_axis": [ + 0 + ] + }, + { + "sample_rate": 125, + 
"timestamp": 1723039026571, + "z_axis": [ + 0 + ], + "e_axis": [ + 0 + ], + "n_axis": [ 0 ] } diff --git a/frontend/src/src/models/response/common/mseed/0.json b/frontend/src/src/models/response/common/mseed/0.json index a22432db1..84179ed9a 100644 --- a/frontend/src/src/models/response/common/mseed/0.json +++ b/frontend/src/src/models/response/common/mseed/0.json @@ -6,10 +6,11 @@ "message": "Successfully get MiniSEED file list", "data": [ { - "ttl": 0, - "time": 0, - "size": "null", - "name": "null" + "ttl": 10, + "time": 1723038095845, + "size": "1 MB", + "name": "AS.SHAKE.00.EHZ.D.2024.220.mseed", + "sha256": "16b7479c164d41d9416bbbe72e6d54475c798540a8c03ad8b9456024ad3d8bd0" } ] } \ No newline at end of file diff --git a/frontend/src/src/models/response/common/socket/0.json b/frontend/src/src/models/response/common/socket/0.json index b54a7c4db..d201aef80 100644 --- a/frontend/src/src/models/response/common/socket/0.json +++ b/frontend/src/src/models/response/common/socket/0.json @@ -1,12 +1,13 @@ { - "ts": 0, - "ehz": [ + "sample_rate": 125, + "timestamp": 1723043090281, + "z_axis": [ 0 ], - "ehe": [ + "e_axis": [ 0 ], - "ehn": [ + "n_axis": [ 0 ] } \ No newline at end of file diff --git a/frontend/src/src/models/response/common/station/0.json b/frontend/src/src/models/response/common/station/0.json index 606575741..14373705b 100644 --- a/frontend/src/src/models/response/common/station/0.json +++ b/frontend/src/src/models/response/common/station/0.json @@ -1,61 +1,64 @@ { - "time": "1970-01-01T23:59:59Z", + "time": "2024-08-07T14:53:55Z", "status": 200, "error": false, "path": "/api/v1/station", "message": "Successfully read station information", "data": { - "timestamp": 0, - "uptime": 0, + "explorer": { + "errors": 47, + "received": 131, + "sample_rate": 125, + "device_id": 20030813, + "latitude": 39.9, + "longitude": 116.4, + "elevation": 50.0, + "elapsed": 12 + }, "station": { - "uuid": "00000000-0000-0000-0000-000000000000", "name": "AnyShake Station", + 
"owner": "Lee", + "region": "Asia", + "country": "China", + "city": "Beijing" + }, + "stream": { "station": "SHAKE", "network": "AS", - "location": "00" - }, - "memory": { - "total": 0, - "free": 0, - "used": 0, - "percent": 0 - }, - "disk": { - "total": 0, - "free": 0, - "used": 0, - "percent": 0 - }, - "adc": { - "resolution": 24, - "fullscale": 5 + "location": "00", + "channel": "EH" }, - "os": { - "os": "*", - "arch": "*", - "distro": "*", - "hostname": "*" + "sensor": { + "frequency": 4.5, + "sensitivity": 28.8, + "velocity": true, + "vref": 5, + "fullscale": 10, + "resolution": 32 }, "cpu": { - "model": "*", - "percent": 0 + "model": "11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz", + "percent": 20 }, - "geophone": { - "sensitivity": 28.8, - "frequency": 4.5 + "disk": { + "total": 983349346304, + "free": 544028266496, + "used": 389294149632, + "percent": 41.71057534940964 }, - "position": { - "latitude": 0, - "longitude": 0, - "elevation": 0 + "memory": { + "total": 24898076672, + "free": 2693279744, + "used": 15624962048, + "percent": 62.755698979638844 }, - "status": { - "messages": 0, - "pushed": 0, - "errors": 0, - "failures": 0, - "queued": 0, - "offset": 0 + "os": { + "uptime": 209226, + "os": "linux", + "arch": "amd64", + "distro": "Linux", + "hostname": "Yuki", + "timestamp": 1723042749771 } } } \ No newline at end of file diff --git a/frontend/src/src/stores/station.tsx b/frontend/src/src/stores/station.tsx index b51acfa17..b8385f044 100644 --- a/frontend/src/src/stores/station.tsx +++ b/frontend/src/src/stores/station.tsx @@ -4,6 +4,7 @@ export interface Station { readonly station: string; readonly network: string; readonly location: string; + readonly channel: string; readonly initialized: boolean; } @@ -11,6 +12,7 @@ const initialStation: Station = { station: "SHAKE", network: "AS", location: "00", + channel: "EH", initialized: false }; diff --git a/frontend/src/src/views/History/handleSetCharts.tsx 
b/frontend/src/src/views/History/handleSetCharts.tsx index 31fc3ff4c..a50507305 100644 --- a/frontend/src/src/views/History/handleSetCharts.tsx +++ b/frontend/src/src/views/History/handleSetCharts.tsx @@ -17,7 +17,7 @@ export const handleSetCharts = ( { chart: ChartProps & { buffer: { - ts: number; + timestamp: number; data: number[]; }[]; ref: RefObject; @@ -43,9 +43,9 @@ export const handleSetCharts = ( } // Set channel buffer from API response - const buffer = res.data.map(({ ts, ...channels }) => ({ - data: channels[key as keyof typeof channels], - ts + const buffer = res.data.map(({ timestamp, ...channels }) => ({ + data: channels[key as keyof typeof channels] as number[], + timestamp })); prev[key].chart.buffer = buffer; @@ -62,7 +62,7 @@ export const handleSetCharts = ( // Get filtered values and apply to chart data const chartData = buffer - .map(({ ts, data }) => { + .map(({ timestamp, data }) => { const filteredData = filterEnabled ? getFilteredCounts(data, { poles: 4, @@ -73,7 +73,10 @@ export const handleSetCharts = ( }) : data; const dataSpanMS = 1000 / filteredData.length; - return filteredData.map((value, index) => [ts + dataSpanMS * index, value]); + return filteredData.map((value, index) => [ + timestamp + dataSpanMS * index, + value + ]); }) .reduce((acc, curArr) => acc.concat(curArr), []); const { current: chartObj } = prev[key].chart.ref; diff --git a/frontend/src/src/views/History/index.tsx b/frontend/src/src/views/History/index.tsx index 35f2e98f6..f600600a2 100644 --- a/frontend/src/src/views/History/index.tsx +++ b/frontend/src/src/views/History/index.tsx @@ -44,13 +44,13 @@ const History = (props: RouterComponentProps) => { const [searchParams, setSearchParams] = useSearchParams(); const [queryDuration, setQueryDuration] = useState<{ - start: number; - end: number; + start_time: number; + end_time: number; }>({ - start: searchParams.has("start") + start_time: searchParams.has("start") ? 
Number(searchParams.get("start")) : currentTimestamp - 1000 * duration, - end: searchParams.has("end") ? Number(searchParams.get("end")) : currentTimestamp + end_time: searchParams.has("end") ? Number(searchParams.get("end")) : currentTimestamp }); const handleTimeChange = (value: number, end: boolean) => @@ -79,9 +79,18 @@ const History = (props: RouterComponentProps) => { const [labels, setLabels] = useState< Record }> >({ - ehz: { label: "views.history.labels.ehz_detail.label", value: "-" }, - ehe: { label: "views.history.labels.ehe_detail.label", value: "-" }, - ehn: { label: "views.history.labels.ehn_detail.label", value: "-" } + z_axis: { + label: "views.history.labels.z_axis_detail.label", + value: "-" + }, + e_axis: { + label: "views.history.labels.e_axis_detail.label", + value: "-" + }, + n_axis: { + label: "views.history.labels.n_axis_detail.label", + value: "-" + } }); const [charts, setCharts] = useState< @@ -89,58 +98,54 @@ const History = (props: RouterComponentProps) => { string, { chart: ChartProps & { - buffer: { ts: number; data: number[] }[]; + buffer: { timestamp: number; data: number[] }[]; ref: RefObject; - filter: { - enabled: boolean; - lowCorner?: number; - highCorner?: number; - }; + filter: { enabled: boolean; lowCorner?: number; highCorner?: number }; }; holder: HolderProps; } > >({ - ehz: { + z_axis: { holder: { collapse: CollapseMode.COLLAPSE_HIDE, - label: "views.history.charts.ehz.label", - text: "views.history.charts.ehz.text" + label: "views.history.charts.z_axis.label", + text: "views.history.charts.z_axis.text" }, chart: { buffer: [], backgroundColor: "#d97706", filter: { enabled: false }, ref: useRef(null), - series: { name: "EHZ", type: "line", color: "#f1f5f9" } + series: { name: `${station.channel}Z`, type: "line", color: "#f1f5f9" } } }, - ehe: { + e_axis: { holder: { collapse: CollapseMode.COLLAPSE_SHOW, - label: "views.history.charts.ehe.label", - text: "views.history.charts.ehe.text" + label: 
"views.history.charts.e_axis.label", + text: "views.history.charts.e_axis.text" }, chart: { buffer: [], backgroundColor: "#10b981", filter: { enabled: false }, ref: useRef(null), - series: { name: "EHE", type: "line", color: "#f1f5f9" } + series: { name: `${station.channel}E`, type: "line", color: "#f1f5f9" } } }, - ehn: { + n_axis: { holder: { collapse: CollapseMode.COLLAPSE_SHOW, - label: "views.history.charts.ehn.label", - text: "views.history.charts.ehn.text" + label: "views.history.charts.n_axis.label", + text: "views.history.charts.n_axis.text" }, chart: { buffer: [], backgroundColor: "#0ea5e9", filter: { enabled: false }, ref: useRef(null), - series: { name: "EHN", type: "line", color: "#f1f5f9" } + series: { name: `${station.channel}N`, type: "line", color: "#f1f5f9" } } } }); @@ -171,7 +176,7 @@ const History = (props: RouterComponentProps) => { // Get filtered values and apply to chart data const chartData = prev[chartKey].chart.buffer - .map(({ ts, data }) => { + .map(({ timestamp, data }) => { const filteredData = filterEnabled ? 
getFilteredCounts(data, { poles: 4, @@ -182,7 +187,10 @@ const History = (props: RouterComponentProps) => { }) : data; const dataSpanMS = 1000 / filteredData.length; - return filteredData.map((value, index) => [ts + dataSpanMS * index, value]); + return filteredData.map((value, index) => [ + timestamp + dataSpanMS * index, + value + ]); }) .reduce((acc, curArr) => acc.concat(curArr), []); const { current: chartObj } = prev[chartKey].chart.ref; @@ -207,14 +215,14 @@ const History = (props: RouterComponentProps) => { }; const handleQueryWaveform = async () => { - const { start, end } = queryDuration; - if (!start || !end || start >= end) { + const { start_time, end_time } = queryDuration; + if (!start_time || !end_time || start_time >= end_time) { sendUserAlert(t("views.history.toasts.duration_error"), true); return; } const { backend } = apiConfig; - const payload = { start, end, channel: "", format: "json" }; + const payload = { start_time, end_time, channel: "", format: "json" }; const res = await sendPromiseAlert( requestRestApi< @@ -238,18 +246,22 @@ const History = (props: RouterComponentProps) => { }; const handleExportSACFile = () => { - const { start, end } = queryDuration; - if (!start || !end || start >= end) { + const { start_time, end_time } = queryDuration; + if (!start_time || !end_time || start_time >= end_time) { sendUserAlert(t("views.history.toasts.duration_error"), true); return; } - const handleSubmitForm = async (channel: string) => { + const handleSubmitForm = async (channelCode: string) => { setForm((prev) => ({ ...prev, open: false })); const { backend } = apiConfig; - const payload = { start, end, channel, format: "sac" }; - const sacFileName = getSACFileName(start, channel, station); + const payload = { start_time, end_time, channel: channelCode, format: "sac" }; + const sacFileName = getSACFileName( + start_time, + `${station.channel}${channelCode}`, + station + ); await sendPromiseAlert( requestRestApi< @@ -274,9 +286,9 @@ const History = 
(props: RouterComponentProps) => { ...prev, open: true, selectOptions: [ - { label: "EHZ", value: "EHZ" }, - { label: "EHE", value: "EHE" }, - { label: "EHN", value: "EHN" } + { label: "Z Axis", value: "Z" }, + { label: "E Axis", value: "E" }, + { label: "N Axis", value: "N" } ], onSubmit: handleSubmitForm, title: "views.history.forms.choose_channel.title", @@ -335,8 +347,8 @@ const History = (props: RouterComponentProps) => { const handleSelectEvent = (value: string) => { setSelect((prev) => ({ ...prev, open: false })); - const [start, end] = value.split("|").map(Number); - setQueryDuration({ start, end }); + const [start_time, end_time] = value.split("|").map(Number); + setQueryDuration({ start_time, end_time }); sendUserAlert(t("views.history.toasts.event_select_success")); }; @@ -394,15 +406,15 @@ const History = (props: RouterComponentProps) => { }; const handleGetShareLink = async () => { - const { start, end } = queryDuration; - if (!start || !end || start >= end) { + const { start_time, end_time } = queryDuration; + if (!start_time || !end_time || start_time >= end_time) { sendUserAlert(t("views.history.toasts.duration_error"), true); return; } const newSearchParams = new URLSearchParams(); - newSearchParams.set("start", String(start)); - newSearchParams.set("end", String(end)); + newSearchParams.set("start", String(start_time)); + newSearchParams.set("end", String(end_time)); setSearchParams(newSearchParams); const newFullUrl = window.location.href; const success = await setClipboardText(newFullUrl); @@ -426,13 +438,13 @@ const History = (props: RouterComponentProps) => { > handleTimeChange(value, false)} /> handleTimeChange(value, true)} @@ -486,7 +498,12 @@ const History = (props: RouterComponentProps) => { {Object.values(labels).map(({ label, value, values, ...rest }) => ( - @@ -506,7 +523,7 @@ const History = (props: RouterComponentProps) => { ; - }; + chart: ChartProps & { ref: RefObject }; holder: HolderProps & { values: Record }; } > @@ -26,41 
+24,36 @@ export const handleSetCharts = ( if (!res?.data) { return; } - stateFn((prev) => { - const { data } = res; - const { timestamp } = data; - Object.keys(prev).forEach((key) => { - if (!(key in data)) { - return; - } - if (Object.prototype.hasOwnProperty.call(data[key as keyof typeof data], "percent")) { - // Get percentage value by key in state - const { percent } = data[key as keyof typeof data] as { - percent: number; - }; - const { current: chart } = prev[key].chart.ref; - if (chart) { - // Append new data to buffer and remove expired data - const initTimestamp = chart.chart.series[0].data.length - ? chart.chart.series[0].data[0].x - : timestamp; - chart.chart.series[0].addPoint( - [timestamp, percent], - true, - timestamp - initTimestamp >= RETENTION_THRESHOLD_MS - ); - } + const { timestamp } = res.data.os; + stateFn((prev) => { + // Set CPU usage chart + const { percent: cpuPercent } = res.data.cpu; + const { current: cpuChart } = prev.cpu.chart.ref; + if (cpuChart) { + const initTimestamp = cpuChart.chart.series[0].data.length + ? cpuChart.chart.series[0].data[0].x + : timestamp; + cpuChart.chart.series[0].addPoint( + [timestamp, cpuPercent], + true, + timestamp - initTimestamp >= RETENTION_THRESHOLD_MS + ); + } - prev[key] = { - ...prev[key], - holder: { - ...prev[key].holder, - values: { usage: percent.toFixed(2) } - } - }; - } - }); + // Set memory usage chart + const { percent: memoryPercent } = res.data.memory; + const { current: memoryChart } = prev.memory.chart.ref; + if (memoryChart) { + const initTimestamp = memoryChart.chart.series[0].data.length + ? 
memoryChart.chart.series[0].data[0].x + : timestamp; + memoryChart.chart.series[0].addPoint( + [timestamp, memoryPercent], + true, + timestamp - initTimestamp >= RETENTION_THRESHOLD_MS + ); + } return prev; }); diff --git a/frontend/src/src/views/Home/handleSetLabels.tsx b/frontend/src/src/views/Home/handleSetLabels.tsx index 681ff445b..cde70c95d 100644 --- a/frontend/src/src/views/Home/handleSetLabels.tsx +++ b/frontend/src/src/views/Home/handleSetLabels.tsx @@ -10,14 +10,12 @@ export const handleSetLabels = ( if (!res?.data) { return; } - const { status } = res.data; + + const { errors, received, elapsed } = res.data.explorer; stateFn((prev) => { - Object.keys(status).forEach((key) => { - if (key in prev) { - const newValue = status[key as keyof typeof status]; - prev[key] = { ...prev[key], value: String(newValue) }; - } - }); + prev.errors = { ...prev.errors, value: String(errors) }; + prev.messages = { ...prev.messages, value: String(received) }; + prev.elapsed = { ...prev.elapsed, value: String(elapsed) }; return prev; }); diff --git a/frontend/src/src/views/Home/handleSetMap.tsx b/frontend/src/src/views/Home/handleSetMap.tsx index d8cbe61bd..a3977dbe7 100644 --- a/frontend/src/src/views/Home/handleSetMap.tsx +++ b/frontend/src/src/views/Home/handleSetMap.tsx @@ -16,8 +16,8 @@ export const handleSetMap = ( if (!res?.data) { return; } - const { position } = res.data; - const { longitude, latitude, elevation } = position; + const { explorer } = res.data; + const { longitude, latitude, elevation } = explorer; stateFn((prev) => ({ ...prev, mapbox: { diff --git a/frontend/src/src/views/Home/index.tsx b/frontend/src/src/views/Home/index.tsx index e3d11f962..9eadaa841 100644 --- a/frontend/src/src/views/Home/index.tsx +++ b/frontend/src/src/views/Home/index.tsx @@ -39,12 +39,12 @@ const Home = () => { unit: "views.home.labels.errors.unit", label: "views.home.labels.errors.label" }, - offset: { + elapsed: { color: true, value: "0", icon: ClockIcon, - unit: 
"views.home.labels.offset.unit", - label: "views.home.labels.offset.label" + unit: "views.home.labels.elapsed.unit", + label: "views.home.labels.elapsed.label" } }); const [charts, setCharts] = useState< diff --git a/frontend/src/src/views/Realtime/handleSetBanner.tsx b/frontend/src/src/views/Realtime/handleSetBanner.tsx index 1f02e2f79..deb640abe 100644 --- a/frontend/src/src/views/Realtime/handleSetBanner.tsx +++ b/frontend/src/src/views/Realtime/handleSetBanner.tsx @@ -10,7 +10,7 @@ export const handleSetBanner = ( res: SocketUpdates, stateFn: Dispatch }>> ) => { - if (!res.ts) { + if (!res.timestamp) { return; } @@ -19,7 +19,7 @@ export const handleSetBanner = ( const scaleName = globalConfig.scales.find((s) => s.property().value === scaleId)?.property().name ?? "Unknown"; - const time = getTimeString(res.ts); + const time = getTimeString(res.timestamp); // Get sample rate in average const channels = Object.values(res).filter((v) => Array.isArray(v)) as number[][]; diff --git a/frontend/src/src/views/Realtime/handleSetCharts.tsx b/frontend/src/src/views/Realtime/handleSetCharts.tsx index 51ac106d3..3dc214716 100644 --- a/frontend/src/src/views/Realtime/handleSetCharts.tsx +++ b/frontend/src/src/views/Realtime/handleSetCharts.tsx @@ -19,13 +19,9 @@ export const handleSetCharts = ( string, { chart: ChartProps & { - buffer: { ts: number; data: number[] }[]; + buffer: { timestamp: number; data: number[] }[]; ref: RefObject; - filter: { - enabled: boolean; - lowCorner?: number; - highCorner?: number; - }; + filter: { enabled: boolean; lowCorner?: number; highCorner?: number }; }; holder: HolderProps & { values: Record }; } @@ -33,23 +29,24 @@ export const handleSetCharts = ( > > ) => { - if (!res.ts) { + if (!res.timestamp) { return; } stateFn((prev) => { - const { ts, ...channels } = res; + const { timestamp, sample_rate, ...channels } = res; const { retention } = store.getState().retention; + Object.keys(prev).forEach((key) => { if (!(key in res)) { return; } // 
Append new data to buffer and remove expired data - const channelData = channels[key as keyof typeof channels]; + const channelData = channels[key as keyof typeof channels] as number[]; const { buffer } = prev[key].chart; - buffer.push({ ts, data: channelData }); - const timeoutThreshold = ts - retention * 1000; - while (buffer[0].ts < timeoutThreshold) { + buffer.push({ timestamp, data: channelData }); + const timeoutThreshold = timestamp - retention * 1000; + while (buffer[0].timestamp < timeoutThreshold) { buffer.shift(); } @@ -68,7 +65,7 @@ export const handleSetCharts = ( // Get filtered values and apply to chart data const chartData = buffer - .map(({ ts, data }) => { + .map(({ timestamp, data }) => { const filteredData = filterEnabled ? getFilteredCounts(data, { poles: 4, @@ -79,7 +76,10 @@ export const handleSetCharts = ( }) : data; const dataSpanMS = 1000 / filteredData.length; - return filteredData.map((value, index) => [ts + dataSpanMS * index, value]); + return filteredData.map((value, index) => [ + timestamp + dataSpanMS * index, + value + ]); }) .reduce((acc, curArr) => acc.concat(curArr), []); const { current: chartObj } = prev[key].chart.ref; diff --git a/frontend/src/src/views/Realtime/index.tsx b/frontend/src/src/views/Realtime/index.tsx index 884287a5f..4882a3da5 100644 --- a/frontend/src/src/views/Realtime/index.tsx +++ b/frontend/src/src/views/Realtime/index.tsx @@ -1,6 +1,7 @@ import { HighchartsReactRefObject } from "highcharts-react-official"; import { RefObject, useCallback, useEffect, useRef, useState } from "react"; import { useTranslation } from "react-i18next"; +import { useSelector } from "react-redux"; import { Banner, BannerProps } from "../../components/Banner"; import { Button } from "../../components/Button"; @@ -10,6 +11,7 @@ import { CollapseMode, Holder, HolderProps } from "../../components/Holder"; import { Input } from "../../components/Input"; import { Panel } from "../../components/Panel"; import { apiConfig } from 
"../../config/api"; +import { ReduxStoreProps } from "../../config/store"; import { useSocket } from "../../helpers/hook/useSocket"; import { sendUserAlert } from "../../helpers/interact/sendUserAlert"; import { userThrottle } from "../../helpers/utils/userThrottle"; @@ -20,6 +22,8 @@ import { handleSetCharts } from "./handleSetCharts"; const Realtime = () => { const { t } = useTranslation(); + const { station } = useSelector(({ station }: ReduxStoreProps) => station); + const [banner, setBanner] = useState }>({ type: "warning", title: "views.realtime.banner.warning.label", @@ -30,7 +34,7 @@ const Realtime = () => { string, { chart: ChartProps & { - buffer: { ts: number; data: number[] }[]; + buffer: { timestamp: number; data: number[] }[]; ref: RefObject; filter: { enabled: boolean; @@ -42,11 +46,11 @@ const Realtime = () => { } > >({ - ehz: { + z_axis: { holder: { collapse: CollapseMode.COLLAPSE_HIDE, - label: "views.realtime.charts.ehz.label", - text: "views.realtime.charts.ehz.text", + label: "views.realtime.charts.z_axis.label", + text: "views.realtime.charts.z_axis.text", values: { pga: "0.00000", pgv: "0.00000", intensity: "-" } }, chart: { @@ -54,14 +58,14 @@ const Realtime = () => { backgroundColor: "#d97706", filter: { enabled: false }, ref: useRef(null), - series: { name: "EHZ", type: "line", color: "#f1f5f9" } + series: { name: `${station.channel}Z`, type: "line", color: "#f1f5f9" } } }, - ehe: { + e_axis: { holder: { collapse: CollapseMode.COLLAPSE_HIDE, - label: "views.realtime.charts.ehe.label", - text: "views.realtime.charts.ehe.text", + label: "views.realtime.charts.e_axis.label", + text: "views.realtime.charts.e_axis.text", values: { pga: "0.00000", pgv: "0.00000", intensity: "-" } }, chart: { @@ -69,14 +73,14 @@ const Realtime = () => { filter: { enabled: false }, backgroundColor: "#10b981", ref: useRef(null), - series: { name: "EHE", type: "line", color: "#f1f5f9" } + series: { name: `${station.channel}E`, type: "line", color: "#f1f5f9" } } }, - 
ehn: { + n_axis: { holder: { collapse: CollapseMode.COLLAPSE_HIDE, - label: "views.realtime.charts.ehn.label", - text: "views.realtime.charts.ehn.text", + label: "views.realtime.charts.n_axis.label", + text: "views.realtime.charts.n_axis.text", values: { pga: "0.00000", pgv: "0.00000", intensity: "-" } }, chart: { @@ -84,7 +88,7 @@ const Realtime = () => { backgroundColor: "#0ea5e9", filter: { enabled: false }, ref: useRef(null), - series: { name: "EHE", type: "line", color: "#f1f5f9" } + series: { name: `${station.channel}N`, type: "line", color: "#f1f5f9" } } } }); @@ -190,7 +194,7 @@ const Realtime = () => { 0 && len(gp.EHE) > 0 && len(gp.EHN) > 0 { - err := onMessage(gp) - if err != nil { - return - } - - lastTime = gp.TS - } - - time.Sleep(100 * time.Millisecond) - } -} diff --git a/publisher/types.go b/publisher/types.go deleted file mode 100644 index 7dfa7c0fe..000000000 --- a/publisher/types.go +++ /dev/null @@ -1,37 +0,0 @@ -package publisher - -import "time" - -type Int32Array []int32 - -type Geophone struct { - TS int64 `json:"ts" gorm:"ts;index;not null"` - EHZ Int32Array `json:"ehz" gorm:"ehz;type:text;not null"` - EHE Int32Array `json:"ehe" gorm:"ehe;type:text;not null"` - EHN Int32Array `json:"ehn" gorm:"ehn;type:text;not null"` -} - -type System struct { - Messages int64 `json:"messages"` - Errors int64 `json:"errors"` - Offset float64 `json:"offset"` -} - -type Status struct { - LastRecvTime time.Time // Timestamp of last received packet - ReadyTime time.Time // If is zero, app will stuck to wait for time syncing - Geophone Geophone // Geophone data of nearest 1 second - Buffer *Geophone // Buffer area, should not be externally accessed - System *System -} - -type ChannelSegmentBuffer struct { - DataBuffer []int32 - Samples int32 - SeqNum int64 -} - -type SegmentBuffer struct { - TimeStamp time.Time - ChannelBuffer map[string]*ChannelSegmentBuffer -} diff --git a/server/endpoints.go b/server/endpoints.go new file mode 100644 index 
000000000..0f0f2e3bd --- /dev/null +++ b/server/endpoints.go @@ -0,0 +1,64 @@ +package server + +import ( + "fmt" + "time" + + "github.com/99designs/gqlgen/graphql/handler" + "github.com/99designs/gqlgen/graphql/playground" + v1 "github.com/anyshake/observer/api/v1" + "github.com/anyshake/observer/api/v1/devel" + "github.com/anyshake/observer/api/v1/history" + "github.com/anyshake/observer/api/v1/inventory" + "github.com/anyshake/observer/api/v1/mseed" + "github.com/anyshake/observer/api/v1/socket" + "github.com/anyshake/observer/api/v1/station" + "github.com/anyshake/observer/api/v1/trace" + v2 "github.com/anyshake/observer/api/v2" + "github.com/anyshake/observer/server/middleware/limit" + "github.com/gin-gonic/gin" +) + +func registerEndpointsV1(routerGroup *gin.RouterGroup, options *Options) error { + if options.RateFactor > 0 { + rateFactor := int64(options.RateFactor) + routerGroup.Use(limit.RateLimit(time.Second, rateFactor, rateFactor)) + } + resolver := &v1.Resolver{Options: options.ServicesOptions} + services := []v1.Endpoint{ + &station.Station{}, + &history.History{}, + &socket.Socket{}, + &trace.Trace{}, + &mseed.MSeed{}, + &inventory.Inventory{}, + } + for _, s := range services { + err := s.Register(routerGroup, resolver) + if err != nil { + return err + } + } + if options.DebugMode { + err := v1.Endpoint(&devel.Devel{}).Register(routerGroup, resolver) + if err != nil { + return err + } + } + + return nil +} + +func registerEndpointsV2(routerGroup *gin.RouterGroup, options *Options) { + apiEndpoint := handler.NewDefaultServer(v2.NewExecutableSchema(v2.Config{ + Resolvers: &v2.Resolver{Options: options.ServicesOptions}, + })) + routerGroup.POST("/graph", func(ctx *gin.Context) { + apiEndpoint.ServeHTTP(ctx.Writer, ctx.Request) + }) + if options.DebugMode { + routerGroup.GET("/graph", func(ctx *gin.Context) { + playground.Handler("AnyShake Observer APIv2", fmt.Sprintf("%s/v2/graph", options.ApiPrefix)).ServeHTTP(ctx.Writer, ctx.Request) + }) + } +} diff 
--git a/server/middleware/logger/writer.go b/server/middleware/logger/writer.go new file mode 100644 index 000000000..c5ece1c6a --- /dev/null +++ b/server/middleware/logger/writer.go @@ -0,0 +1,51 @@ +package logger + +import ( + "fmt" + "math" + "net/http" + "time" + + "github.com/gin-gonic/gin" + "github.com/sirupsen/logrus" +) + +func WriteLog(logger logrus.FieldLogger, notLogged ...string) gin.HandlerFunc { + var skip map[string]struct{} + + if length := len(notLogged); length > 0 { + skip = make(map[string]struct{}, length) + + for _, p := range notLogged { + skip[p] = struct{}{} + } + } + + return func(c *gin.Context) { + path := c.Request.URL.Path + start := time.Now() + c.Next() + stop := time.Since(start) + latency := int(math.Ceil(float64(stop.Nanoseconds()) / 1000000.0)) + statusCode := c.Writer.Status() + clientIP := c.ClientIP() + clientUserAgent := c.Request.UserAgent() + + if _, ok := skip[path]; ok { + return + } + + if len(c.Errors) > 0 { + logger.Error(c.Errors.ByType(gin.ErrorTypePrivate).String()) + } else { + msg := fmt.Sprintf("%s - \"%s %s\" %d \"%s\" (%d ms)", clientIP, c.Request.Method, path, statusCode, clientUserAgent, latency) + if statusCode >= http.StatusInternalServerError { + logger.Error(msg) + } else if statusCode >= http.StatusBadRequest { + logger.Warn(msg) + } else { + logger.Info(msg) + } + } + } +} diff --git a/server/router.go b/server/router.go deleted file mode 100644 index 17dd720f9..000000000 --- a/server/router.go +++ /dev/null @@ -1,39 +0,0 @@ -package server - -import ( - "time" - - "github.com/anyshake/observer/app" - "github.com/anyshake/observer/app/v1/devel" - "github.com/anyshake/observer/app/v1/history" - "github.com/anyshake/observer/app/v1/inventory" - "github.com/anyshake/observer/app/v1/mseed" - "github.com/anyshake/observer/app/v1/socket" - "github.com/anyshake/observer/app/v1/station" - "github.com/anyshake/observer/app/v1/trace" - "github.com/anyshake/observer/server/middleware/limit" - 
"github.com/gin-gonic/gin" -) - -func registerRouterV1(rg *gin.RouterGroup, options *app.ServerOptions) { - if options.RateFactor > 0 { - rateFactor := int64(options.RateFactor) - rg.Use(limit.RateLimit(time.Second, rateFactor, rateFactor)) - } - services := []ApiServices{ - &station.Station{}, - &history.History{}, - &socket.Socket{}, - &trace.Trace{}, - &mseed.MSeed{}, - &devel.Devel{}, - &inventory.Inventory{}, - } - for _, s := range services { - s.RegisterModule(rg, options) - } -} - -// TODO: Support FDSNWS -// func registerRouterFDSNWS(rg *gin.RouterGroup, options *app.ServerOptions) { -// } diff --git a/server/daemon.go b/server/serve.go similarity index 54% rename from server/daemon.go rename to server/serve.go index 611a6b02e..7e804db33 100644 --- a/server/daemon.go +++ b/server/serve.go @@ -3,15 +3,13 @@ package server import ( "fmt" "net/http" - "strings" - "github.com/anyshake/observer/app" "github.com/anyshake/observer/frontend" "github.com/anyshake/observer/server/middleware/cors" + loggerware "github.com/anyshake/observer/server/middleware/logger" "github.com/anyshake/observer/server/middleware/static" "github.com/anyshake/observer/server/response" "github.com/anyshake/observer/utils/logger" - "github.com/fatih/color" "github.com/gin-contrib/gzip" "github.com/gin-gonic/gin" ) @@ -20,22 +18,19 @@ func init() { gin.SetMode(gin.ReleaseMode) } -func StartDaemon(host string, port int, options *app.ServerOptions) error { +func Serve(host string, port int, options *Options) error { + if options.DebugMode { + logger.GetLogger(Serve).Warnln("PLEASE NOTE THAT DEBUG MODE IS ENABLED") + } r := gin.New() - r.Use( - gzip.Gzip(options.Gzip, gzip.WithExcludedPaths([]string{options.APIPrefix})), - gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string { - w := color.New(color.FgCyan).SprintFunc() - trimmedErr := strings.TrimRight(param.ErrorMessage, "\n") - loggerText := w(fmt.Sprintf("%s [server] %s %d %s %s %s\n", - param.TimeStamp.Format("2006/01/02 
15:04:05"), - param.Method, param.StatusCode, - param.ClientIP, param.Path, trimmedErr, - )) - return loggerText - }), + // Setup Gzip & logger + r.Use( + gzip.Gzip(options.GzipLevel, gzip.WithExcludedPaths([]string{options.ApiPrefix})), + loggerware.WriteLog(logger.GetLogger(Serve)), ) + + // Setup Cross-Origin Resource Sharing (CORS) if options.CORS { r.Use(cors.AllowCORS([]cors.HttpHeader{ { @@ -54,25 +49,28 @@ func StartDaemon(host string, port int, options *app.ServerOptions) error { })) } + // Setup 404 error handler r.NoRoute(func(c *gin.Context) { response.Error(c, http.StatusNotFound) }) - // Register API v1 routers - registerRouterV1(r.Group( - fmt.Sprintf("/%s/v1", options.APIPrefix), - ), options) - // Register FDSNWS routers - // registerRouterFDSNWS(r.Group("/fdsnws"), options) + // Register API routers + err := registerEndpointsV1(r.Group(fmt.Sprintf("/%s/v1", options.ApiPrefix)), options) + if err != nil { + return err + } + registerEndpointsV2(r.Group(fmt.Sprintf("/%s/v2", options.ApiPrefix)), options) + // Setup static file serve r.Use(static.ServeEmbed(&static.LocalFileSystem{ Root: options.WebPrefix, Prefix: options.WebPrefix, FileSystem: static.CreateFilesystem(frontend.Dist, "dist"), })) - err := r.Run(fmt.Sprintf("%s:%d", host, port)) + // Start server + err = r.Run(fmt.Sprintf("%s:%d", host, port)) if err != nil { - logger.Fatal("server", err, color.FgRed) + logger.GetLogger(Serve).Fatalf("server: %v\n", err) } return err diff --git a/server/types.go b/server/types.go index bf49ec1c1..b04e0ec08 100644 --- a/server/types.go +++ b/server/types.go @@ -1,10 +1,15 @@ package server import ( - "github.com/anyshake/observer/app" - "github.com/gin-gonic/gin" + "github.com/anyshake/observer/services" ) -type ApiServices interface { - RegisterModule(rg *gin.RouterGroup, options *app.ServerOptions) +type Options struct { + CORS bool + DebugMode bool + GzipLevel int + RateFactor int + WebPrefix string + ApiPrefix string + ServicesOptions 
*services.Options } diff --git a/services/archiver/event.go b/services/archiver/event.go new file mode 100644 index 000000000..4e9274416 --- /dev/null +++ b/services/archiver/event.go @@ -0,0 +1,50 @@ +package archiver + +import ( + "github.com/anyshake/observer/drivers/dao/tables" + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/utils/logger" +) + +func (a *ArchiverService) handleExplorerEvent(data *explorer.ExplorerData) { + var adcCountModel tables.AdcCount + + a.recordBuffer[len(a.recordBuffer)-a.insertCountDown] = *data + a.insertCountDown-- + a.cleanupCountDown-- + + if a.insertCountDown == 0 { + records := make([]tables.AdcCount, len(a.recordBuffer)) + for i := 0; i < len(a.recordBuffer); i++ { + records[i] = tables.AdcCount{ + Timestamp: a.recordBuffer[i].Timestamp, + SampleRate: a.recordBuffer[i].SampleRate, + Z_Axis: a.recordBuffer[i].Z_Axis, + E_Axis: a.recordBuffer[i].E_Axis, + N_Axis: a.recordBuffer[i].N_Axis, + } + } + err := a.databaseConn. + Table(adcCountModel.GetName()). + Create(records). + Error + if err != nil { + logger.GetLogger(a.GetServiceName()).Warnln(err) + } else { + logger.GetLogger(a.GetServiceName()).Infof("%d record(s) has been inserted to database", len(a.recordBuffer)) + } + a.insertCountDown = INSERT_COUNTDOWN + } + + if a.cleanupCountDown == 0 { + err := a.databaseConn. + Table(adcCountModel.GetName()). + Where("timestamp < ?", data.Timestamp-int64(a.lifeCycle*86400*1000)). + Delete(&tables.AdcCount{}). 
+ Error + if err != nil { + logger.GetLogger(a.GetServiceName()).Warnln(err) + } + a.cleanupCountDown = CLEANUP_COUNTDOWN + } +} diff --git a/services/archiver/name.go b/services/archiver/name.go new file mode 100644 index 000000000..73e691e1b --- /dev/null +++ b/services/archiver/name.go @@ -0,0 +1,5 @@ +package archiver + +func (s *ArchiverService) GetServiceName() string { + return "archiver" +} diff --git a/services/archiver/start.go b/services/archiver/start.go new file mode 100644 index 000000000..69378a697 --- /dev/null +++ b/services/archiver/start.go @@ -0,0 +1,46 @@ +package archiver + +import ( + "sync" + + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/services" + "github.com/anyshake/observer/utils/logger" +) + +func (a *ArchiverService) Start(options *services.Options, waitGroup *sync.WaitGroup) { + defer waitGroup.Done() + + // Get lifecycle from configuration + serviceConfig, ok := options.Config.Services[a.GetServiceName()] + if !ok { + logger.GetLogger(a.GetServiceName()).Errorln("service configuration not found") + return + } + if !serviceConfig.(map[string]any)["enable"].(bool) { + logger.GetLogger(a.GetServiceName()).Infoln("service has been disabled") + return + } + a.lifeCycle = int(serviceConfig.(map[string]any)["lifecycle"].(float64)) + a.cleanupCountDown = CLEANUP_COUNTDOWN + a.insertCountDown = INSERT_COUNTDOWN + a.databaseConn = options.Database + + // Subscribe to Explorer events + var explorerDeps *explorer.ExplorerDependency + err := options.Dependency.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + logger.GetLogger(a.GetServiceName()).Errorln(err) + return + } + explorerDriver := explorer.ExplorerDriver(&explorer.ExplorerDriverImpl{}) + explorerDriver.Subscribe(explorerDeps, a.GetServiceName(), a.handleExplorerEvent) + + logger.GetLogger(a.GetServiceName()).Infoln("service has been started") + <-options.CancelToken.Done() + 
explorerDriver.Unsubscribe(explorerDeps, a.GetServiceName()) + logger.GetLogger(a.GetServiceName()).Infoln("service has been stopped") +} diff --git a/services/archiver/types.go b/services/archiver/types.go new file mode 100644 index 000000000..75f452567 --- /dev/null +++ b/services/archiver/types.go @@ -0,0 +1,19 @@ +package archiver + +import ( + "github.com/anyshake/observer/drivers/explorer" + "gorm.io/gorm" +) + +const ( + INSERT_COUNTDOWN = 10 + CLEANUP_COUNTDOWN = 60 +) + +type ArchiverService struct { + recordBuffer [INSERT_COUNTDOWN]explorer.ExplorerData + insertCountDown int + cleanupCountDown int + lifeCycle int + databaseConn *gorm.DB +} diff --git a/services/miniseed/clean.go b/services/miniseed/clean.go new file mode 100644 index 000000000..de521d8b3 --- /dev/null +++ b/services/miniseed/clean.go @@ -0,0 +1,43 @@ +package miniseed + +import ( + "os" + "path/filepath" + "strings" + "time" +) + +func (m *MiniSeedService) handleClean() error { + expiredFiles := []string{} + walkFn := func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + modTime := info.ModTime() + duration := time.Duration(m.lifeCycle) * time.Hour * 24 + if time.Now().After(modTime.Add(duration)) && + strings.HasSuffix(path, ".mseed") && + strings.ContainsAny(path, m.stationCode) && + strings.ContainsAny(path, m.networkCode) { + expiredFiles = append(expiredFiles, path) + } + } + + return nil + } + + err := filepath.Walk(m.basePath, walkFn) + if err != nil { + return err + } + + for _, file := range expiredFiles { + err := os.Remove(file) + if err != nil { + return err + } + } + + return nil +} diff --git a/services/miniseed/event.go b/services/miniseed/event.go new file mode 100644 index 000000000..789d73481 --- /dev/null +++ b/services/miniseed/event.go @@ -0,0 +1,31 @@ +package miniseed + +import ( + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/utils/logger" +) + +func (m *MiniSeedService) 
handleExplorerEvent(data *explorer.ExplorerData) { + m.miniseedBuffer[len(m.miniseedBuffer)-m.writeBufferCountDown] = *data + + m.writeBufferCountDown-- + m.cleanUpCountDown-- + + if m.writeBufferCountDown == 0 { + err := m.handleWrite() + if err != nil { + logger.GetLogger(m.GetServiceName()).Warnln(err) + } else { + logger.GetLogger(m.GetServiceName()).Infof("%d record(s) has been written to MiniSEED file", MINISEED_WRITE_INTERVAL) + } + m.writeBufferCountDown = MINISEED_WRITE_INTERVAL + } + + if m.cleanUpCountDown == 0 { + err := m.handleClean() + if err != nil { + logger.GetLogger(m.GetServiceName()).Warnln(err) + } + m.cleanUpCountDown = MINISEED_CLEANUP_INTERVAL + } +} diff --git a/services/miniseed/filepath.go b/services/miniseed/filepath.go new file mode 100644 index 000000000..ccb8665ef --- /dev/null +++ b/services/miniseed/filepath.go @@ -0,0 +1,16 @@ +package miniseed + +import ( + "fmt" + "time" +) + +func (m *MiniSeedService) getFilePath(basePath, stationCode, networkCode, locationCode, channelName string, t time.Time) string { + // e.g. 
/path/to/miniseed/AS.SHAKE.00.EHZ.D.2023.208.mseed + return fmt.Sprintf("%s/%s.%s.%s.%s.D.%s.%s.mseed", + basePath, + networkCode, stationCode, locationCode, channelName, + t.UTC().Format("2006"), + t.UTC().Format("002"), + ) +} diff --git a/services/miniseed/name.go b/services/miniseed/name.go new file mode 100644 index 000000000..970069a9d --- /dev/null +++ b/services/miniseed/name.go @@ -0,0 +1,5 @@ +package miniseed + +func (s *MiniSeedService) GetServiceName() string { + return "miniseed" +} diff --git a/services/miniseed/start.go b/services/miniseed/start.go new file mode 100644 index 000000000..ffad162d8 --- /dev/null +++ b/services/miniseed/start.go @@ -0,0 +1,101 @@ +package miniseed + +import ( + "fmt" + "os" + "strconv" + "sync" + + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/services" + "github.com/anyshake/observer/utils/logger" + "github.com/bclswl0827/mseedio" +) + +func (m *MiniSeedService) Start(options *services.Options, waitGroup *sync.WaitGroup) { + defer waitGroup.Done() + + // Get lifecycle from configuration + serviceConfig, ok := options.Config.Services[m.GetServiceName()] + if !ok { + logger.GetLogger(m.GetServiceName()).Errorln("service configuration not found") + return + } + if !serviceConfig.(map[string]any)["enable"].(bool) { + logger.GetLogger(m.GetServiceName()).Infoln("service has been disabled") + return + } + m.lifeCycle = int(serviceConfig.(map[string]any)["lifecycle"].(float64)) + m.basePath = serviceConfig.(map[string]any)["path"].(string) + m.stationCode = options.Config.Stream.Station + m.networkCode = options.Config.Stream.Network + m.locationCode = options.Config.Stream.Location + m.channelPrefix = options.Config.Stream.Channel + m.miniseedSequence = map[string]int{ + explorer.EXPLORER_CHANNEL_CODE_Z: 0, + explorer.EXPLORER_CHANNEL_CODE_E: 0, + explorer.EXPLORER_CHANNEL_CODE_N: 0, + } + m.cleanUpCountDown = MINISEED_CLEANUP_INTERVAL + m.writeBufferCountDown = MINISEED_WRITE_INTERVAL + + 
// Get sequence number if file exists + currentTime, _ := options.TimeSource.GetTime() + for _, channelCode := range []string{ + explorer.EXPLORER_CHANNEL_CODE_Z, + explorer.EXPLORER_CHANNEL_CODE_E, + explorer.EXPLORER_CHANNEL_CODE_N, + } { + channelName := fmt.Sprintf("%s%s", m.channelPrefix, channelCode) + filePath := m.getFilePath( + m.basePath, + m.stationCode, + m.networkCode, + m.locationCode, + channelName, + currentTime, + ) + _, err := os.Stat(filePath) + if err == nil { + logger.GetLogger(m.GetServiceName()).Infof("starting %s from last record", channelName) + + // Get last sequence number from file + var ms mseedio.MiniSeedData + err := ms.Read(filePath) + if err != nil { + logger.GetLogger(m.GetServiceName()).Errorln(err) + continue + } + if len(ms.Series) > 0 { + lastRecord := ms.Series[len(ms.Series)-1] + lastSeqNum := lastRecord.FixedSection.SequenceNumber + n, err := strconv.Atoi(lastSeqNum) + if err != nil { + logger.GetLogger(m.GetServiceName()).Errorln(err) + continue + } + + // Resume at n+1: sequence n is the record already on disk + m.miniseedSequence[channelCode] = n + 1 + } + } + } + + // Subscribe to Explorer events + var explorerDeps *explorer.ExplorerDependency + err := options.Dependency.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + logger.GetLogger(m.GetServiceName()).Errorln(err) + return + } + explorerDriver := explorer.ExplorerDriver(&explorer.ExplorerDriverImpl{}) + explorerDriver.Subscribe(explorerDeps, m.GetServiceName(), m.handleExplorerEvent) + + logger.GetLogger(m.GetServiceName()).Infoln("service has been started") + <-options.CancelToken.Done() + explorerDriver.Unsubscribe(explorerDeps, m.GetServiceName()) + logger.GetLogger(m.GetServiceName()).Infoln("service has been stopped") +} diff --git a/services/miniseed/types.go b/services/miniseed/types.go new file mode 100644 index 000000000..24905eb69 --- /dev/null +++ b/services/miniseed/types.go @@ -0,0 +1,26 @@ +package miniseed + +import (
+ "github.com/anyshake/observer/drivers/explorer" + "github.com/bclswl0827/mseedio" +) + +const ( + MINISEED_BIT_ORDER = mseedio.MSBFIRST + MINISEED_ENCODE_TYPE = mseedio.STEIM2 + MINISEED_WRITE_INTERVAL = 5 + MINISEED_CLEANUP_INTERVAL = 60 +) + +type MiniSeedService struct { + miniseedBuffer [MINISEED_WRITE_INTERVAL]explorer.ExplorerData + miniseedSequence map[string]int // Independent sequence number for Z, E, N + writeBufferCountDown int + cleanUpCountDown int + lifeCycle int + basePath string + stationCode string + networkCode string + locationCode string + channelPrefix string +} diff --git a/services/miniseed/write.go b/services/miniseed/write.go new file mode 100644 index 000000000..3ae275abf --- /dev/null +++ b/services/miniseed/write.go @@ -0,0 +1,81 @@ +package miniseed + +import ( + "fmt" + "time" + + "github.com/anyshake/observer/drivers/explorer" + "github.com/bclswl0827/mseedio" +) + +func (m *MiniSeedService) handleWrite() error { + var ( + startTimestamp = m.miniseedBuffer[0].Timestamp + startSampleRate = m.miniseedBuffer[0].SampleRate + ) + + // Check if the timestamp is continuous + for i := 1; i < len(m.miniseedBuffer); i++ { + if m.miniseedBuffer[i].Timestamp != startTimestamp+int64(i*1000) { + return fmt.Errorf("timestamp is not continuous, expected %d, got %d", startTimestamp+int64(i*1000), m.miniseedBuffer[i].Timestamp) + } + } + + // Check if sample rate is the same + for i := 1; i < len(m.miniseedBuffer); i++ { + if m.miniseedBuffer[i].SampleRate != startSampleRate { + return fmt.Errorf("sample rate is not the same, expected %d, got %d", startSampleRate, m.miniseedBuffer[i].SampleRate) + } + } + + // Write data to file by channels + for _, channelCode := range []string{ + explorer.EXPLORER_CHANNEL_CODE_Z, + explorer.EXPLORER_CHANNEL_CODE_E, + explorer.EXPLORER_CHANNEL_CODE_N, + } { + var channelBuffer []int32 + for i := 0; i < len(m.miniseedBuffer); i++ { + switch channelCode { + case explorer.EXPLORER_CHANNEL_CODE_Z: + channelBuffer =
append(channelBuffer, m.miniseedBuffer[i].Z_Axis...) + case explorer.EXPLORER_CHANNEL_CODE_E: + channelBuffer = append(channelBuffer, m.miniseedBuffer[i].E_Axis...) + case explorer.EXPLORER_CHANNEL_CODE_N: + channelBuffer = append(channelBuffer, m.miniseedBuffer[i].N_Axis...) + } + } + var miniseed mseedio.MiniSeedData + err := miniseed.Init(MINISEED_ENCODE_TYPE, MINISEED_BIT_ORDER) + if err != nil { + return err + } + channelName := fmt.Sprintf("%s%s", m.channelPrefix, channelCode) + startTime := time.UnixMilli(startTimestamp).UTC() + err = miniseed.Append(channelBuffer, &mseedio.AppendOptions{ + SequenceNumber: fmt.Sprintf("%06d", m.miniseedSequence[channelCode]), + SampleRate: float64(startSampleRate), + StartTime: startTime, + ChannelCode: channelName, + StationCode: m.stationCode, + NetworkCode: m.networkCode, + LocationCode: m.locationCode, + }) + if err != nil { + return err + } + dataBytes, err := miniseed.Encode(mseedio.APPEND, MINISEED_BIT_ORDER) + if err != nil { + return err + } + filePath := m.getFilePath(m.basePath, m.stationCode, m.networkCode, m.locationCode, channelName, startTime) + err = miniseed.Write(filePath, mseedio.APPEND, dataBytes) + if err != nil { + return err + } + + m.miniseedSequence[channelCode]++ + } + + return nil +} diff --git a/services/types.go b/services/types.go new file mode 100644 index 000000000..7e98122cc --- /dev/null +++ b/services/types.go @@ -0,0 +1,24 @@ +package services + +import ( + "context" + "sync" + + "github.com/anyshake/observer/config" + "github.com/anyshake/observer/utils/timesource" + "go.uber.org/dig" + "gorm.io/gorm" +) + +type Options struct { + Config *config.Config + Dependency *dig.Container + Database *gorm.DB + TimeSource timesource.Source + CancelToken context.Context +} + +type Service interface { + Start(*Options, *sync.WaitGroup) + GetServiceName() string +} diff --git a/services/watchdog/name.go b/services/watchdog/name.go new file mode 100644 index 000000000..cabb6faaa --- /dev/null +++ 
b/services/watchdog/name.go @@ -0,0 +1,5 @@ +package watchdog + +func (w *WatchdogService) GetServiceName() string { + return "watchdog" +} diff --git a/services/watchdog/start.go b/services/watchdog/start.go new file mode 100644 index 000000000..96634f205 --- /dev/null +++ b/services/watchdog/start.go @@ -0,0 +1,45 @@ +package watchdog + +import ( + "sync" + "time" + + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/services" + "github.com/anyshake/observer/utils/logger" +) + +func (s *WatchdogService) Start(options *services.Options, waitGroup *sync.WaitGroup) { + defer waitGroup.Done() + logger.GetLogger(s.GetServiceName()).Infoln("service has been started") + + var explorerDeps *explorer.ExplorerDependency + err := options.Dependency.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + logger.GetLogger(s.GetServiceName()).Errorln(err) + return + } + + prevUpdatedAt := explorerDeps.Health.UpdatedAt + ticker := time.NewTicker(CHECK_INTERVAL) + // Release the ticker when the service exits (the cancel branch below returns) + defer ticker.Stop() + + for { + select { + case <-options.CancelToken.Done(): + logger.GetLogger(s.GetServiceName()).Infoln("service has been stopped") + return + case <-ticker.C: + if prevUpdatedAt == explorerDeps.Health.UpdatedAt { + logger.GetLogger(s.GetServiceName()).Warnf("device is not responding, checking again in next %d seconds", int(CHECK_INTERVAL.Seconds())) + } else { + logger.GetLogger(s.GetServiceName()).Infof("device works perfectly, checking again in next %d seconds", int(CHECK_INTERVAL.Seconds())) + } + prevUpdatedAt = explorerDeps.Health.UpdatedAt + } + } +} diff --git a/services/watchdog/types.go b/services/watchdog/types.go new file mode 100644 index 000000000..d99610b1b --- /dev/null +++ b/services/watchdog/types.go @@ -0,0 +1,7 @@ +package watchdog + +import "time" + +const CHECK_INTERVAL = 30 * time.Second + +type WatchdogService struct{} diff --git a/startups/explorer/execute.go b/startups/explorer/execute.go new file mode
100644 index 000000000..c6627e88d --- /dev/null +++ b/startups/explorer/execute.go @@ -0,0 +1,41 @@ +package explorer + +import ( + "errors" + + "github.com/anyshake/observer/drivers/explorer" + "github.com/anyshake/observer/startups" + "github.com/anyshake/observer/utils/logger" + "go.uber.org/dig" +) + +func (t *ExplorerStartupTask) Execute(depsContainer *dig.Container, options *startups.Options) error { + var explorerDeps *explorer.ExplorerDependency + err := depsContainer.Invoke(func(deps *explorer.ExplorerDependency) error { + explorerDeps = deps + return nil + }) + if err != nil { + return err + } + explorerDriver := explorer.ExplorerDriver(&explorer.ExplorerDriverImpl{}) + + logger.GetLogger(t.GetTaskName()).Infoln("checking availability of opened device") + if !explorerDriver.IsAvailable(explorerDeps) { + return errors.New("opened device is not working, check the connection or modes") + } + + logger.GetLogger(t.GetTaskName()).Infoln("device is being initialized, please wait") + err = explorerDriver.Init(explorerDeps) + if err != nil { + return err + } + + logger.GetLogger(t.GetTaskName()).Infoln("device has been initialized successfully") + if !explorerDeps.Config.LegacyMode { + logger.GetLogger(t.GetTaskName()).Infof("handshake successful, device ID: %08X", explorerDeps.Config.DeviceId) + } else { + logger.GetLogger(t.GetTaskName()).Warnln("device is in legacy mode, keep an eye on the CPU usage") + } + return nil +} diff --git a/startups/explorer/name.go b/startups/explorer/name.go new file mode 100644 index 000000000..06e611cc9 --- /dev/null +++ b/startups/explorer/name.go @@ -0,0 +1,5 @@ +package explorer + +func (t *ExplorerStartupTask) GetTaskName() string { + return "explorer" +} diff --git a/startups/explorer/provide.go b/startups/explorer/provide.go new file mode 100644 index 000000000..7c4574197 --- /dev/null +++ b/startups/explorer/provide.go @@ -0,0 +1,40 @@ +package explorer + +import ( + "github.com/anyshake/observer/drivers/explorer" + 
"github.com/anyshake/observer/drivers/transport" + "github.com/anyshake/observer/startups" + "github.com/anyshake/observer/utils/logger" + "go.uber.org/dig" +) + +func (t *ExplorerStartupTask) Provide(container *dig.Container, options *startups.Options) error { + // Open AnyShake Explorer device + explorerDsn := &transport.TransportDependency{ + DSN: options.Config.Explorer.DSN, + Engine: options.Config.Explorer.Engine, + } + explorerTransport, err := transport.New(explorerDsn) + if err != nil { + return err + } + err = explorerTransport.Open(explorerDsn) + if err != nil { + return err + } + + logger.GetLogger(t.GetTaskName()).Infoln("device has been opened successfully") + return container.Provide(func() *explorer.ExplorerDependency { + return &explorer.ExplorerDependency{ + FallbackTime: options.TimeSource, + CancelToken: t.CancelToken, + Transport: explorerTransport, + Config: explorer.ExplorerConfig{ + Latitude: options.Config.Location.Latitude, + Longitude: options.Config.Location.Longitude, + Elevation: options.Config.Location.Elevation, + LegacyMode: options.Config.Explorer.Legacy, + }, + } + }) +} diff --git a/startups/explorer/types.go b/startups/explorer/types.go new file mode 100644 index 000000000..d616eb809 --- /dev/null +++ b/startups/explorer/types.go @@ -0,0 +1,7 @@ +package explorer + +import "context" + +type ExplorerStartupTask struct { + CancelToken context.Context +} diff --git a/startups/types.go b/startups/types.go new file mode 100644 index 000000000..4013d2229 --- /dev/null +++ b/startups/types.go @@ -0,0 +1,20 @@ +package startups + +import ( + "github.com/anyshake/observer/config" + "github.com/anyshake/observer/utils/timesource" + "go.uber.org/dig" + "gorm.io/gorm" +) + +type Options struct { + Config *config.Config + Database *gorm.DB + TimeSource timesource.Source +} + +type StartupTask interface { + Provide(*dig.Container, *Options) error + Execute(*dig.Container, *Options) error + GetTaskName() string +} diff --git 
a/utils/duration/difference.go b/utils/duration/difference.go deleted file mode 100644 index d94140518..000000000 --- a/utils/duration/difference.go +++ /dev/null @@ -1,11 +0,0 @@ -package duration - -import "time" - -func Difference(a, b time.Time) time.Duration { - if a.After(b) { - return a.Sub(b) - } - - return b.Sub(a) -} diff --git a/utils/duration/timestamp.go b/utils/duration/timestamp.go deleted file mode 100644 index 431461161..000000000 --- a/utils/duration/timestamp.go +++ /dev/null @@ -1,11 +0,0 @@ -package duration - -import "time" - -func Timestamp(offset float64) (time.Time, int64) { - offsetDuration := time.Duration(offset * float64(time.Second)) - currentTime := time.Now().UTC() - - result := currentTime.Add(offsetDuration) - return result, result.UnixMilli() -} diff --git a/utils/fifo/new.go b/utils/fifo/new.go new file mode 100644 index 000000000..28770def6 --- /dev/null +++ b/utils/fifo/new.go @@ -0,0 +1,8 @@ +package fifo + +func New(size int) Buffer { + return Buffer{ + data: make([]byte, size), + capacity: size, + } +} diff --git a/utils/fifo/read.go b/utils/fifo/read.go new file mode 100644 index 000000000..35e66fdf0 --- /dev/null +++ b/utils/fifo/read.go @@ -0,0 +1,38 @@ +package fifo + +import ( + "fmt" +) + +func (b *Buffer) Read(header []byte, size int) ([]byte, error) { + b.mutex.Lock() + defer b.mutex.Unlock() + + for { + if (b.writeIndex-b.readIndex+b.capacity)%b.capacity < size { + return nil, fmt.Errorf("not enough data") + } + + isHeaderFind := true + for i := 0; i < len(header); i++ { + if b.data[(b.readIndex+i)%b.capacity] != header[i] { + isHeaderFind = false + break + } + } + + if isHeaderFind { + break + } + + b.readIndex = (b.readIndex + 1) % b.capacity + } + + packet := make([]byte, size) + for i := 0; i < size; i++ { + packet[i] = b.data[(b.readIndex+i)%b.capacity] + } + + b.readIndex = (b.readIndex + size) % b.capacity + return packet, nil +} diff --git a/utils/fifo/types.go b/utils/fifo/types.go new file mode 100644 
index 000000000..57ebe06d2 --- /dev/null +++ b/utils/fifo/types.go @@ -0,0 +1,11 @@ +package fifo + +import "sync" + +type Buffer struct { + data []byte + readIndex int + writeIndex int + capacity int + mutex sync.Mutex +} diff --git a/utils/fifo/write.go b/utils/fifo/write.go new file mode 100644 index 000000000..dd7cbdf36 --- /dev/null +++ b/utils/fifo/write.go @@ -0,0 +1,19 @@ +package fifo + +func (b *Buffer) Write(p []byte) (n int, err error) { + b.mutex.Lock() + defer b.mutex.Unlock() + + for _, bt := range p { + b.data[b.writeIndex] = bt + + b.writeIndex = (b.writeIndex + 1) % b.capacity + if b.writeIndex == b.readIndex { + b.readIndex = (b.readIndex + 1) % b.capacity + } + + n++ + } + + return n, nil +} diff --git a/utils/logger/fatal.go b/utils/logger/fatal.go deleted file mode 100644 index f254a7eac..000000000 --- a/utils/logger/fatal.go +++ /dev/null @@ -1,12 +0,0 @@ -package logger - -import ( - "os" - - "github.com/fatih/color" -) - -func Fatal(module string, v any, colorCode color.Attribute) { - Print(module, v, colorCode, false) - os.Exit(1) -} diff --git a/utils/logger/file.go b/utils/logger/file.go new file mode 100644 index 000000000..adc4f1a58 --- /dev/null +++ b/utils/logger/file.go @@ -0,0 +1,12 @@ +package logger + +import ( + "github.com/rifflock/lfshook" + "github.com/sirupsen/logrus" +) + +func SetFile(path string) { + logrus.AddHook(lfshook.NewHook( + path, &logrus.JSONFormatter{}, + )) +} diff --git a/utils/logger/initialize.go b/utils/logger/initialize.go new file mode 100644 index 000000000..0336b30bc --- /dev/null +++ b/utils/logger/initialize.go @@ -0,0 +1,13 @@ +package logger + +import ( + nested "github.com/antonfisher/nested-logrus-formatter" + "github.com/sirupsen/logrus" +) + +func Initialize() { + logrus.SetFormatter(&nested.Formatter{ + TimestampFormat: TIMESTAMP_FORMAT, + FieldsOrder: []string{"module"}, + }) +} diff --git a/utils/logger/level.go b/utils/logger/level.go new file mode 100644 index 000000000..3d31856df --- 
/dev/null +++ b/utils/logger/level.go @@ -0,0 +1,26 @@ +package logger + +import ( + "errors" + + "github.com/sirupsen/logrus" +) + +func SetLevel(level LogLevel) error { + switch level { + case INFO: + logrus.SetLevel(logrus.InfoLevel) + return nil + case WARN: + logrus.SetLevel(logrus.WarnLevel) + return nil + case ERROR: + logrus.SetLevel(logrus.ErrorLevel) + return nil + case FATAL: + logrus.SetLevel(logrus.FatalLevel) + return nil + } + + return errors.New("unknown log level") +} diff --git a/utils/logger/logger.go b/utils/logger/logger.go new file mode 100644 index 000000000..dcbd2bc19 --- /dev/null +++ b/utils/logger/logger.go @@ -0,0 +1,38 @@ +package logger + +import ( + "reflect" + "runtime" + "strings" + + "github.com/sirupsen/logrus" +) + +func GetLogger(x any) *logrus.Entry { + if v, ok := x.(string); ok { + return logrus.WithFields(logrus.Fields{ + "module": strings.ToLower(v), + }) + } + + val := reflect.ValueOf(x) + if val.Kind() == reflect.Func { + runtimeFunc := runtime.FuncForPC(val.Pointer()) + if runtimeFunc != nil { + moduleNames := strings.Split(runtimeFunc.Name(), ".") + if len(moduleNames) > 1 { + lastPart := moduleNames[len(moduleNames)-1] + moduleName := strings.Split(lastPart, "/") + if len(moduleName) > 0 { + return logrus.WithFields(logrus.Fields{ + "module": strings.ToLower(moduleName[len(moduleName)-1]), + }) + } + } + } + } + + return logrus.WithFields(logrus.Fields{ + "module": "unknown", + }) +} diff --git a/utils/logger/print.go b/utils/logger/print.go deleted file mode 100644 index 3071e3a3a..000000000 --- a/utils/logger/print.go +++ /dev/null @@ -1,17 +0,0 @@ -package logger - -import ( - "fmt" - "log" - - "github.com/fatih/color" -) - -func Print(module string, v any, colorCode color.Attribute, carriage bool) { - color.Set(colorCode) - if carriage { - fmt.Print("\r") - } - log.Printf("[%s] %v\n", module, v) - color.Unset() -} diff --git a/utils/logger/types.go b/utils/logger/types.go new file mode 100644 index 
000000000..86c897218 --- /dev/null +++ b/utils/logger/types.go @@ -0,0 +1,12 @@ +package logger + +type LogLevel int + +const ( + INFO LogLevel = iota + WARN + ERROR + FATAL +) + +const TIMESTAMP_FORMAT = "2006-01-02 15:04:05.000" diff --git a/utils/text/truncate.go b/utils/text/truncate.go deleted file mode 100644 index a5ddb05c8..000000000 --- a/utils/text/truncate.go +++ /dev/null @@ -1,9 +0,0 @@ -package text - -func TruncateString(s string, n int) string { - if len(s) <= n { - return s - } - - return s[:n] -} diff --git a/utils/timesource/new.go b/utils/timesource/new.go new file mode 100644 index 000000000..148bb59ba --- /dev/null +++ b/utils/timesource/new.go @@ -0,0 +1,10 @@ +package timesource + +import "time" + +func New(baseTime, refTime time.Time) Source { + return Source{ + LocalBaseTime: baseTime, + ReferenceTime: refTime, + } +} diff --git a/utils/timesource/time.go b/utils/timesource/time.go new file mode 100644 index 000000000..de84c7e19 --- /dev/null +++ b/utils/timesource/time.go @@ -0,0 +1,15 @@ +package timesource + +import ( + "errors" + "time" +) + +func (g *Source) GetTime() (time.Time, error) { + if g.LocalBaseTime.IsZero() || g.ReferenceTime.IsZero() { + return time.Now().UTC(), errors.New("empty BaseTime or RefTime is not allowed") + } + + elapsed := time.Since(g.LocalBaseTime.UTC()) + return g.ReferenceTime.Add(elapsed).UTC(), nil +} diff --git a/utils/timesource/types.go b/utils/timesource/types.go new file mode 100644 index 000000000..9c2ef221d --- /dev/null +++ b/utils/timesource/types.go @@ -0,0 +1,8 @@ +package timesource + +import "time" + +type Source struct { + LocalBaseTime time.Time + ReferenceTime time.Time +}