From baf334eea9fc8a22a7d8ba4fe772e3cd2fef1057 Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Sun, 15 Jan 2023 18:03:34 -0800 Subject: [PATCH 01/12] ISSUE-174 Create events_log table --- .gitignore | 1 + openapi.yaml | 263 ++++++++- services/postgis/tagbase_schema.sql | 844 +++------------------------- 3 files changed, 307 insertions(+), 801 deletions(-) diff --git a/.gitignore b/.gitignore index c8b4051..07524a4 100644 --- a/.gitignore +++ b/.gitignore @@ -66,6 +66,7 @@ target/ .env +postgis-data postgres-data logs tagbase_server/tagbase_server/coverage.xml diff --git a/openapi.yaml b/openapi.yaml index 3e4a4c2..cf1d2ac 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -7,7 +7,7 @@ info: description: | tagbse-server provides HTTP endpoints for ingestion of various files \ into a Tagbase SQL database. Input file support currently includes eTUFF (see [here](https://doi.org/10.6084/m9.figshare.10032848.v4) \ - and [here](https://doi.org/10.6084/m9.figshare.10159820.v1)). The REST API complies with [OpenAPI v3.0.3](https://spec.openapis.org/oas/v3.0.3.html). + and [here](https://doi.org/10.6084/m9.figshare.10159820.v1)). license: name: Apache License v2.0 url: https://www.apache.org/licenses/LICENSE-2.0 @@ -20,11 +20,82 @@ servers: - description: ICCAT Test tagbase-server url: https://162.13.162.49/tagbase/api/v0.7.0 tags: -- description: Ingestion operations. +- description: Events Logging + name: events +- description: Ingestion operations name: ingest -- description: Tag Operations. +- description: Tag Operations name: tags paths: + /events: + get: + description: Get information about all events + operationId: list_events + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: A list containing all events. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. 
+ summary: Get information about all events + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + /events/{event_id}: + get: + description: Get information about an individual event + operationId: get_event + parameters: + - $ref: '#/components/parameters/eventId' + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/event200' + description: Information about an individual event + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Get information about an individual event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + put: + description: Update notes for an event + operationId: put_event + parameters: + - $ref: '#/components/parameters/notes' + - $ref: '#/components/parameters/tagId' + - $ref: '#/components/parameters/subId' + - $ref: '#/components/parameters/version' + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/eventPut200' + description: A success message confirming the update. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Update the 'notes' and/or 'version' associated with an event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller /ingest: get: description: Get network accessible file and execute ingestion @@ -50,13 +121,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. 
"500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get network accessible file and execute ingestion tags: @@ -93,13 +164,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Post a local file and perform a ingest operation tags: @@ -115,13 +186,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tags200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tags200' + description: A list of all tags. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about all tags tags: @@ -138,13 +209,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tag200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tag200' + description: Information about an individual tag. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about an individual tag tags: @@ -164,20 +235,28 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TagPut200' - description: A success message confirming ingestion. 
+ $ref: '#/components/schemas/tagPut200' + description: Message confirming successful data update "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' - description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml summary: Update the 'notes' and/or 'version' associated with a tag submission tags: - tags x-openapi-router-controller: tagbase_server.controllers.tags_controller components: parameters: + eventId: + description: Numeric eventId + explode: true + in: path + name: event_id + required: true + schema: + type: number filename: description: Free-form text field to explicitly define the name of the file to be persisted explode: true @@ -249,13 +328,135 @@ components: example: "eTUFF-sailfish-117259_2.txt" title: filename type: string - Ingest200: + event200: + description: Information for a given event + example: + tag: + - date_time: '2022-04-01T04:58:21.319061+00:00' + filename: 'eTUFF-sailfish-117259_2.txt' + metadata: + person_owner: John Do + owner_contact: john@do.net + manufacturer: telemetry inc. + model: new_gen + attachment_method: anchor + notes: 'Ingested by admin on 2022-06-01 for Sailfish tagging campaign.' + submission_id: 5 + tag_id: 3 + version: '1' + - date_time: '2022-06-01T05:39:46.896088+00:00' + filename: 'eTUFF-sailfish-117259_2.txt' + metadata: + person_owner: Jane Do + owner_contact: jane@do.net + manufacturer: telemetry inc. + model: newer_gen + attachment_method: anchor + notes: 'Ingested by admin on 2022-06-01 for version 2 of the Sailfish tagging campaign.' 
+ submission_id: 6 + tag_id: 3 + version: '2' + properties: + tag: + description: List containing one or more submissions for a given tag + items: + type: object + properties: + date_time: + description: Local datetime stamp at the time of eTUFF tag data file ingestion + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + type: string + filename: + $ref: '#/components/schemas/filename' + metadata: + description: Contains the ingested tag metadata consistent with the eTUFF specification + example: + person_owner: John Do + owner_contact: john@do.net + manufacturer: telemetry inc. + model: new_gen + attachment_method: anchor + type: object + additionalProperties: + type: string + notes: + description: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution positional meta/data) + example: "Ingested by admin on XXXX-XX-XX to back-process campaign XYZ." + type: string + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase + example: 5 + type: integer + tag_id: + description: Unique numeric Tag ID associated with the ingested tag eTUFF data file + example: 3 + type: integer + version: + description: Version identifier for the eTUFF tag data file ingested + example: "1" + type: string + type: array + title: event200 + type: object + eventPut200: + description: Event update response + example: + code: "200" + message: Tag '1' submission '5' successfully updated. + properties: + code: + description: HTTP status code + example: "200" + type: string + message: + description: A string detailing specifics of an HTTP operation + example: Tag '1' submission '5' successfully updated. 
+ type: string + title: eventPut200 + type: object + events200: + description: Response detailing all available events + example: + count: 2 + tags: + - tag_id: 1 + filename: eTUFF-sailfish-117259_2.txt + - tag_id: 2 + filename: eTUFF-sailfish-117259.txt + properties: + count: + description: Total count of unique tags + example: 2 + type: integer + tags: + description: List of unique numeric Tag IDs and associated filename + type: array + items: + description: Unique numeric Tag ID associated with the ingested tag eTUFF data file + example: + tag_id: 1 + filename: eTUFF-sailfish-117259_2.txt + properties: + tag_id: + description: Unique numeric Tag ID associated with the ingested tag data file + example: 1 + type: integer + filename: + description: Full name and extension of the ingested eTUFF tag data file + example: eTUFF-sailfish-117259.txt + type: string + type: object + title: tags200 + type: object + ingest200: description: HTTP 200 success response example: code: "200" elapsed: '0:00:06.506691' message: Data file eTUFF-sailfish-117259.txt successfully ingested into Tagbase - DB. + DB properties: code: description: HTTP status code @@ -267,11 +468,11 @@ components: message: description: A string detailing specifics of an HTTP operation example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. + Tagbase DB type: string - title: Ingest200 + title: ingest200 type: object - Response500: + response500: description: 500 Internal Server Error example: code: "200" @@ -286,19 +487,19 @@ components: message: description: A string detailing specifics of the HTTP 500 response example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. + Tagbase DB type: string more_info: - description: Additional details (if available) to diagnose the 500 response. 
+ description: Additional details (if available) to diagnose the 500 response example: https://httpwg.org/specs/rfc7231.html#status.500 type: string trace: description: Trace diagnostic information related to the response example: 123e4567-e89b-12d3-a456-426614174000 type: string - title: Response500 + title: response500 type: object - Tag200: + tag200: description: Information for a given tag example: tag: @@ -368,9 +569,9 @@ components: example: "1" type: string type: array - title: Tag200 + title: tag200 type: object - TagPut200: + tagPut200: description: HTTP 200 success response example: code: "200" @@ -384,9 +585,9 @@ components: description: A string detailing specifics of an HTTP operation example: Tag '1' submission '5' successfully updated. type: string - title: TagPut200 + title: tagPut200 type: object - Tags200: + tags200: description: Response detailing all available unique tags and associated filename example: count: 2 @@ -418,5 +619,5 @@ components: example: eTUFF-sailfish-117259.txt type: string type: object - title: Tags200 + title: tags200 type: object diff --git a/services/postgis/tagbase_schema.sql b/services/postgis/tagbase_schema.sql index ce27ba4..d3dea82 100644 --- a/services/postgis/tagbase_schema.sql +++ b/services/postgis/tagbase_schema.sql @@ -24,492 +24,233 @@ SET default_table_access_method = heap; SET default_with_oids = false; --- --- Name: data_histogram_bin_data; Type: TABLE; Schema: public; Owner: postgres --- - -CREATE TABLE data_histogram_bin_data ( - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, - bin_id bigint NOT NULL, - bin_class integer NOT NULL, - date_time timestamp(6) with time zone NOT NULL, - variable_value character varying(30) NOT NULL, - position_date_time timestamp(6) with time zone, - variable_id bigint NOT NULL +CREATE TYPE status_enum AS ENUM ('FAILED', 'FINISHED', 'KILLED', 'MIGRATION', 'POSTMIGRATION', 'PREMIGRATION'); + +CREATE TABLE events_log ( + submission_id integer NOT NULL, + tag_id integer 
NOT NULL, + event_id integer NOT NULL, + event_category character varying(30) NOT NULL, + event_name character varying(30) NOT NULL, + time_start timestamp(6) with time zone NOT NULL, + time_end timestamp(6) with time zone NOT NULL, + duration TIME NOT NULL, + event_status status_enum NOT NULL, + event_notes text ); +ALTER TABLE events_log OWNER TO postgres; -ALTER TABLE data_histogram_bin_data OWNER TO postgres; +COMMENT ON TABLE events_log IS 'Contains a persistent events register for capturing application behaviour, usage, statuses and anomalies pertaining to data. Capture, in an easily extensible form, key discrete database events relating to the ingestion, migration, summarization (and possibly external connection, querying, and usage). This includes records of event type, timing, and status/outcome pertaining to a given tag dataset and submission.'; --- --- Name: TABLE data_histogram_bin_data; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN events_log.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; -COMMENT ON TABLE data_histogram_bin_data IS 'Contains the frequency for corresponding summary data binning schemes (migrated from proc_observations)'; +COMMENT ON COLUMN events_log.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; +COMMENT ON COLUMN events_log.event_id IS 'Unique numeric ID associated with the persisted event'; --- --- Name: COLUMN data_histogram_bin_data.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN events_log.event_category IS 'A specific phase within the Tagbase lifecycle e.g. 
submission, ingestion, migration, reporting.'; -COMMENT ON COLUMN data_histogram_bin_data.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; +COMMENT ON COLUMN events_log.event_name IS 'Name tying the event category to the actual event characteristics'; +COMMENT ON COLUMN events_log.time_start IS 'Event start time'; --- --- Name: COLUMN data_histogram_bin_data.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN events_log.time_end IS 'Event end time'; -COMMENT ON COLUMN data_histogram_bin_data.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; +COMMENT ON COLUMN events_log.duration IS 'Event duration'; +COMMENT ON COLUMN events_log.event_status IS 'Enumerated event status'; --- --- Name: COLUMN data_histogram_bin_data.bin_id; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN data_histogram_bin_data.bin_id IS 'Unique bin ID for the summary bin-frequency class'; +COMMENT ON COLUMN events_log.event_notes IS 'User-defined notes to better describe an event'; +CREATE TABLE data_histogram_bin_data ( + submission_id integer NOT NULL, + tag_id integer NOT NULL, + bin_id integer NOT NULL, + bin_class integer NOT NULL, + date_time timestamp(6) with time zone NOT NULL, + variable_value character varying(30) NOT NULL, + position_date_time timestamp(6) with time zone, + variable_id integer NOT NULL +); --- --- Name: COLUMN data_histogram_bin_data.bin_class; Type: COMMENT; Schema: public; Owner: postgres --- +ALTER TABLE data_histogram_bin_data OWNER TO postgres; -COMMENT ON COLUMN data_histogram_bin_data.bin_class IS 'Sequential numeric bin class identifier related to either Depth or Temperature. Usually there are 12 (1-12) bin ranges (Min and Max Depth or Temperature respectively), however there are times the bin ranges will not be 12, but instead 14 or 16. 
The larger the number, the more recent the tag models are from tag manufacturers, as they make more bytes available for storage.'; +COMMENT ON TABLE data_histogram_bin_data IS 'Contains the frequency for corresponding summary data binning schemes (migrated from proc_observations)'; +COMMENT ON COLUMN data_histogram_bin_data.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; --- --- Name: COLUMN data_histogram_bin_data.date_time; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN data_histogram_bin_data.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; -COMMENT ON COLUMN data_histogram_bin_data.date_time IS 'Date/time stamp of the tag summarized bin-frequency data record'; +COMMENT ON COLUMN data_histogram_bin_data.bin_id IS 'Unique bin ID for the summary bin-frequency class'; +COMMENT ON COLUMN data_histogram_bin_data.bin_class IS 'Sequential numeric bin class identifier related to either Depth or Temperature. Usually there are 12 (1-12) bin ranges (Min and Max Depth or Temperature respectively), however there are times the bin ranges will not be 12, but instead 14 or 16. 
The larger the number, the more recent the tag models are from tag manufacturers, as they make more bytes available for storage.'; --- --- Name: COLUMN data_histogram_bin_data.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN data_histogram_bin_data.date_time IS 'Date/time stamp of the tag summarized bin-frequency data record'; COMMENT ON COLUMN data_histogram_bin_data.variable_value IS 'Aggregate measure for the given bin-interval of the geophysical value of the observed tag variable record'; - --- --- Name: COLUMN data_histogram_bin_data.position_date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_data.position_date_time IS 'Date/time stamp of nearest matched associated positional record'; - --- --- Name: COLUMN data_histogram_bin_data.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_data.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table. 
Note that records in this table are NOT expected to be equivalent to those in the variable_id column of the data_histogram_bin_info table'; - --- --- Name: data_histogram_bin_info; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_histogram_bin_info ( - bin_id bigint NOT NULL, + bin_id integer NOT NULL, bin_class integer NOT NULL, min_value character varying(30) NOT NULL, max_value character varying(30) NOT NULL, - variable_id bigint NOT NULL + variable_id integer NOT NULL ); - ALTER TABLE data_histogram_bin_info OWNER TO postgres; --- --- Name: TABLE data_histogram_bin_info; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_histogram_bin_info IS 'Contains definitions of binning schemes for summary tag data (migrated from proc_observations)'; - --- --- Name: COLUMN data_histogram_bin_info.bin_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.bin_id IS 'Unique bin ID for the summary bin-frequency class'; - --- --- Name: COLUMN data_histogram_bin_info.bin_class; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.bin_class IS 'Sequential numeric bin class identifier related to either Depth or Temperature. Usually there are 12 (1-12) bin ranges (Min and Max Depth or Temperature respectively), however there are times the bin ranges will not be 12, but instead 14 or 16. 
The larger the number, the more recent the tag models are from tag manufacturers, as they make more bytes available for storage.'; - --- --- Name: COLUMN data_histogram_bin_info.min_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.min_value IS 'Value of minimum/lower bound of bin interval'; - --- --- Name: COLUMN data_histogram_bin_info.max_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.max_value IS 'Value of maximum/upper bound of bin interval'; - --- --- Name: COLUMN data_histogram_bin_info.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table. Note that records in this table are NOT expected to be equivalent to those in the variable_id column of the data_histogram_bin_data table'; - --- --- Name: data_position; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_position ( date_time timestamp(6) with time zone NOT NULL, lat character varying(30) NOT NULL, lon character varying(30) NOT NULL, lat_err character varying(30), lon_err character varying(30), - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, argos_location_class character varying(1), solution_id integer NOT NULL DEFAULT 1, flag_as_reference integer NOT NULL DEFAULT 0 ); - ALTER TABLE data_position OWNER TO postgres; --- --- Name: TABLE data_position; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_position IS 'Contains the tag positional data series with associated Lat/Lon error estimates where available (migrated from proc_observations)'; - --- --- Name: COLUMN data_position.date_time; Type: COMMENT; Schema: public; Owner: postgres 
--- - COMMENT ON COLUMN data_position.date_time IS 'Date/time stamp of the tag positional data record'; - --- --- Name: COLUMN data_position.lat; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lat IS 'Latitude in decimal degrees of the positional data tag record'; - --- --- Name: COLUMN data_position.lon; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lon IS 'Longitude in decimal degrees of the positional data tag record'; - --- --- Name: COLUMN data_position.lat_err; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lat_err IS 'Error associated with the tag record Latitudinal positional estimate'; - --- --- Name: COLUMN data_position.lon_err; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lon_err IS 'Error associated with the tag record Longitudinal positional estimate'; - --- --- Name: COLUMN data_position.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN data_position.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN data_position.argos_location_class; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.argos_location_class IS 'ARGOS Location Class code (G,3,2,1,0,A,B,Z) https://www.argos-system.org/wp-content/uploads/2016/08/r363_9_argos_users_manual-v1.6.6.pdf , page 13.'; - --- --- Name: COLUMN data_position.solution_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.solution_id IS 'Unique numeric identifier for a given tag geolocation dataset solution. 
solution_id=1 is assigned to the primary or approved solution. Incremented solution_id''s assigned to other positional dataset solutions for a given tag_id and submission_id'; - --- --- Name: COLUMN data_position.flag_as_reference; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.flag_as_reference IS 'Integer (representing psudo boolean value) flag field which identifies whether positional data associated with a given Tag and Track solution are considered to be coordinates of the "Reference" track (ie. best solution currently). Coordinate record takes 1 if it is part of the Reference track or 0 if it is not.'; --- --- Name: data_profile; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_profile ( - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, - variable_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, + variable_id integer NOT NULL, date_time timestamp(6) with time zone NOT NULL, depth character varying(30) NOT NULL, variable_value character varying(30) DEFAULT '', position_date_time timestamp(6) with time zone ); - ALTER TABLE data_profile OWNER TO postgres; --- --- Name: TABLE data_profile; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_profile IS 'Contains the summarized bin profile tag observations (migrated from proc_observations)'; - --- --- Name: COLUMN data_profile.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN data_profile.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN data_profile.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.variable_id IS 
'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table'; - --- --- Name: COLUMN data_profile.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.date_time IS 'Date/time stamp of the tag data record'; - --- --- Name: COLUMN data_profile.depth; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.depth IS 'Depth of the tag data record'; - --- --- Name: COLUMN data_profile.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.variable_value IS 'Geophysical value of the observed tag variable record'; - --- --- Name: COLUMN data_profile.position_date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.position_date_time IS 'Date/time stamp of nearest matched associated positional record'; - --- --- Name: data_time_series; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_time_series ( date_time timestamp(6) with time zone NOT NULL, - variable_id bigint NOT NULL, + variable_id integer NOT NULL, variable_value character varying(30) NOT NULL, - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, position_date_time timestamp(6) with time zone ); - ALTER TABLE data_time_series OWNER TO postgres; --- --- Name: TABLE data_time_series; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_time_series IS 'Contains the continuous measurement archival time series of tag geophysical measurements (migrated from proc_observations)'; - --- --- Name: COLUMN data_time_series.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.date_time IS 'Date/time stamp of the tag data record'; - --- --- Name: COLUMN data_time_series.variable_id; Type: COMMENT; 
Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table'; - --- --- Name: COLUMN data_time_series.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.variable_value IS 'Geophysical value of the observed tag variable record'; - --- --- Name: COLUMN data_time_series.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN data_time_series.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN data_time_series.position_date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.position_date_time IS 'Date/time stamp of nearest matched associated positional record'; - --- --- Name: metadata; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE metadata ( - submission_id bigint NOT NULL, - attribute_id bigint NOT NULL, + submission_id integer NOT NULL, + attribute_id integer NOT NULL, attribute_value text NOT NULL, - tag_id bigint NOT NULL + tag_id integer NOT NULL ); - ALTER TABLE metadata OWNER TO postgres; --- --- Name: TABLE metadata; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE metadata IS 'Contains the ingested tag metadata consistent with the eTUFF specification'; - --- --- Name: COLUMN metadata.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for 
ingest/importation into Tagbase'; - --- --- Name: COLUMN metadata.attribute_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.attribute_id IS 'Unique numeric metadata attribute ID based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata.attribute_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.attribute_value IS 'Value associated with the given eTUFF metadata attribute'; - --- --- Name: COLUMN metadata.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: metadata_position; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE metadata_position ( - submission_id bigint NOT NULL, - attribute_id bigint NOT NULL, + submission_id integer NOT NULL, + attribute_id integer NOT NULL, attribute_value text NOT NULL, - tag_id bigint NOT NULL, + tag_id integer NOT NULL, solution_id integer NOT NULL DEFAULT 1 ); - ALTER TABLE metadata_position OWNER TO postgres; --- --- Name: TABLE metadata_position; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE metadata_position IS 'Contains the ingested tag metadata consistent with the eTUFF specification'; - --- --- Name: COLUMN metadata_position.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN metadata_position.attribute_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.attribute_id IS 'Unique numeric metadata attribute ID based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_position.attribute_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.attribute_value IS 'Value associated with 
the given eTUFF metadata attribute'; - --- --- Name: COLUMN metadata_position.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN metadata_position.solution_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.solution_id IS 'Unique numeric identifier for a given tag geolocation dataset solution. solution_id=1 is assigned to the primary or approved solution. Incremented solution_id''s assigned to other positional dataset solutions for a given tag_id and submission_id'; - --- --- Name: metadata_types; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE metadata_types ( - attribute_id bigint NOT NULL, + attribute_id integer NOT NULL, category character varying(1024) NOT NULL, attribute_name character varying(1024) NOT NULL, description text NOT NULL, @@ -518,71 +259,26 @@ CREATE TABLE metadata_types ( necessity character varying(1024) NOT NULL ); - ALTER TABLE metadata_types OWNER TO postgres; --- --- Name: TABLE metadata_types; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE metadata_types IS 'Contains descriptive information on tag metadata based on the eTUFF specification'; - --- --- Name: COLUMN metadata_types.attribute_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.attribute_id IS 'Unique numeric metadata attribute ID based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.category; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.category IS 'Metadata attribute category or group based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.attribute_name; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.attribute_name IS 'Name of metadata attribute based on the eTUFF metadata 
specification'; - --- --- Name: COLUMN metadata_types.description; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.description IS 'Description of metadata attribute based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.example; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.example IS 'Example value of metadata attribute on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.comments; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.comments IS 'Comments or notes relating to the metadata attribute based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.necessity; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.necessity IS 'Designation of the metadata attribute as Required, Recommended, or Optional based on the eTUFF metadata specification'; - --- --- Name: observation_types; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE observation_types ( - variable_id bigint NOT NULL, + variable_id integer NOT NULL, variable_name character varying(255) NOT NULL, standard_name character varying(255), variable_source character varying(255), @@ -591,69 +287,24 @@ CREATE TABLE observation_types ( standard_unit character varying(255) ); - ALTER TABLE observation_types OWNER TO postgres; --- --- Name: TABLE observation_types; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE observation_types IS 'Contains listings and descriptions of observation variable types based on the eTUFF specification'; - --- --- Name: COLUMN observation_types.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.variable_id IS 'Unique variable identifier based on the eTUFF tag data file specification'; - --- --- Name: COLUMN observation_types.variable_name; Type: COMMENT; Schema: public; Owner: 
postgres --- - COMMENT ON COLUMN observation_types.variable_name IS 'Variable name based on the eTUFF tag data file specification'; - --- --- Name: COLUMN observation_types.standard_name; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.standard_name IS 'CF Standard name for observation variable, if available'; - --- --- Name: COLUMN observation_types.variable_source; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.variable_source IS 'Source authority for the given variables'; - --- --- Name: COLUMN observation_types.variable_units; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.variable_units IS 'Units of the variable based on the eTUFF tag data file specification'; - --- --- Name: COLUMN observation_types.notes; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.notes IS 'Notes or comments relating to the variable'; - --- --- Name: COLUMN observation_types.standard_unit; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.standard_unit IS 'CF canonical standard unit for observation variable, if available'; - --- --- Name: observation_types_variable_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres --- - CREATE SEQUENCE observation_types_variable_id_seq START WITH 1 INCREMENT BY 1 @@ -661,142 +312,57 @@ CREATE SEQUENCE observation_types_variable_id_seq NO MAXVALUE CACHE 1; - ALTER TABLE observation_types_variable_id_seq OWNER TO postgres; --- --- Name: observation_types_variable_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres --- - ALTER SEQUENCE observation_types_variable_id_seq OWNED BY observation_types.variable_id; - --- --- Name: proc_observations; Type: TABLE; Schema: public; Owner: postgres --- - CREATE UNLOGGED TABLE proc_observations ( date_time timestamp(6) with time zone NOT NULL, - variable_id bigint NOT NULL, + variable_id integer NOT 
NULL, variable_value character varying(30) NOT NULL, - submission_id bigint NOT NULL, - tag_id bigint NOT NULL + submission_id integer NOT NULL, + tag_id integer NOT NULL ); +ALTER TABLE proc_observations OWNER TO postgres; -ALTER TABLE proc_observations OWNER TO postgres; - --- --- Name: TABLE proc_observations; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON TABLE proc_observations IS 'Contains staged source tag eTUFF data imported into Tagbase'; - - --- --- Name: COLUMN proc_observations.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.date_time IS 'Date/time stamp of data record from source eTUFF file ingested'; - - --- --- Name: COLUMN proc_observations.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table'; - - --- --- Name: COLUMN proc_observations.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.variable_value IS 'Value of the given observation_type variable for the eTUFF data record'; - - --- --- Name: COLUMN proc_observations.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; +COMMENT ON TABLE proc_observations IS 'Contains staged source tag eTUFF data imported into Tagbase'; +COMMENT ON COLUMN proc_observations.date_time IS 'Date/time stamp of data record from source eTUFF file ingested'; --- --- Name: COLUMN proc_observations.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN proc_observations.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file 
ingested. The variable_id is based on observation or measurement variables listed in the observation_types table'; -COMMENT ON COLUMN proc_observations.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; +COMMENT ON COLUMN proc_observations.variable_value IS 'Value of the given observation_type variable for the eTUFF data record'; +COMMENT ON COLUMN proc_observations.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; --- --- Name: submission; Type: TABLE; Schema: public; Owner: postgres --- +COMMENT ON COLUMN proc_observations.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; CREATE TABLE submission ( - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, date_time timestamp(6) with time zone DEFAULT now() NOT NULL, filename text NOT NULL, version character varying(50), notes text ); - ALTER TABLE submission OWNER TO postgres; --- --- Name: TABLE submission; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE submission IS 'Contains information on source tag eTUFF files submitted for ingest into Tagbase'; - --- --- Name: COLUMN submission.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN submission.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.tag_id IS 'Unique numeric Tag ID associated with the ingested tag eTUFF data file'; - --- --- Name: COLUMN submission.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.date_time IS 'Local datetime stamp at the time of eTUFF tag data file ingestion'; - --- --- Name: COLUMN submission.filename; Type: COMMENT; Schema: public; Owner: postgres --- - 
COMMENT ON COLUMN submission.filename IS 'Full path, name and extension of the ingested eTUFF tag data file'; - --- --- Name: COLUMN submission.version; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.version IS 'Version identifier for the eTUFF tag data file ingested'; - --- --- Name: COLUMN submission.notes; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.notes IS 'Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solutionpositional meta/data)'; - --- --- Name: submission_submission_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres --- - CREATE SEQUENCE submission_submission_id_seq START WITH 1 INCREMENT BY 1 @@ -804,20 +370,10 @@ CREATE SEQUENCE submission_submission_id_seq NO MAXVALUE CACHE 1; - ALTER TABLE submission_submission_id_seq OWNER TO postgres; --- --- Name: submission_submission_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres --- - ALTER SEQUENCE submission_submission_id_seq OWNED BY submission.submission_id; - --- --- Name: submission_tag_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres --- - CREATE SEQUENCE submission_tag_id_seq START WITH 1 INCREMENT BY 1 @@ -825,401 +381,149 @@ CREATE SEQUENCE submission_tag_id_seq NO MAXVALUE CACHE 1; - ALTER TABLE submission_tag_id_seq OWNER TO postgres; ALTER SEQUENCE submission_tag_id_seq OWNED BY submission.tag_id; --- --- Name: observation_types variable_id; Type: DEFAULT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY observation_types ALTER COLUMN variable_id SET DEFAULT nextval('observation_types_variable_id_seq'::regclass); - --- --- Name: submission submission_id; Type: DEFAULT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY submission ALTER COLUMN submission_id SET DEFAULT nextval('submission_submission_id_seq'::regclass); - --- --- Data for 
Name: data_histogram_bin_data; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_histogram_bin_data (submission_id, tag_id, bin_id, bin_class, date_time, variable_value, position_date_time, variable_id) FROM stdin; \. - --- --- Data for Name: data_histogram_bin_info; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_histogram_bin_info (bin_id, bin_class, min_value, max_value, variable_id) FROM stdin; \. - --- --- Data for Name: data_position; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_position (date_time, lat, lon, lat_err, lon_err, submission_id, tag_id, argos_location_class, solution_id) FROM stdin; \. - --- --- Data for Name: data_profile; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_profile (submission_id, tag_id, variable_id, date_time, depth, variable_value, position_date_time) FROM stdin; \. - --- --- Data for Name: data_time_series; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_time_series (date_time, variable_id, variable_value, submission_id, tag_id, position_date_time) FROM stdin; \. - --- --- Data for Name: metadata; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY metadata (submission_id, attribute_id, attribute_value, tag_id) FROM stdin; \. - --- --- Data for Name: metadata_position; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY metadata_position (submission_id, attribute_id, attribute_value, tag_id, solution_id) FROM stdin; \. - --- --- Data for Name: proc_observations; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY proc_observations (date_time, variable_id, variable_value, submission_id, tag_id) FROM stdin; \. - --- --- Data for Name: submission; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY submission (submission_id, tag_id, date_time, filename, version, notes) FROM stdin; \. 
- --- --- Name: observation_types_variable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres --- - SELECT pg_catalog.setval('observation_types_variable_id_seq', 1, false); - --- --- Name: submission_submission_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres --- - SELECT pg_catalog.setval('submission_submission_id_seq', 1, false); - --- --- Name: submission_tag_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres --- - SELECT pg_catalog.setval('submission_tag_id_seq', 1, false); - --- --- Name: data_histogram_bin_data data_histogram_bin_data_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT data_histogram_bin_data_pkey PRIMARY KEY (submission_id, tag_id, variable_id, bin_id, bin_class, date_time) WITH (fillfactor='100'); - --- --- Name: data_histogram_bin_info data_histogram_bin_info_bin_id_bin_class_key; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_info ADD CONSTRAINT data_histogram_bin_info_bin_id_bin_class_key UNIQUE (bin_id, bin_class); - --- --- Name: data_histogram_bin_info data_histogram_bin_info_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_info ADD CONSTRAINT data_histogram_bin_info_pkey PRIMARY KEY (variable_id, bin_id, bin_class) WITH (fillfactor='100'); - --- --- Name: data_position data_position_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_position ADD CONSTRAINT data_position_pkey PRIMARY KEY (submission_id, tag_id, solution_id, date_time) WITH (fillfactor='100'); - --- --- Name: data_profile data_profile_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_profile ADD CONSTRAINT data_profile_pkey PRIMARY KEY (submission_id, tag_id, date_time, depth, variable_id) WITH (fillfactor='100'); - --- --- Name: data_time_series data_time_series_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres 
--- - ALTER TABLE ONLY data_time_series ADD CONSTRAINT data_time_series_pkey PRIMARY KEY (submission_id, tag_id, variable_id, date_time) WITH (fillfactor='100'); - --- --- Name: metadata metadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata ADD CONSTRAINT metadata_pkey PRIMARY KEY (submission_id, attribute_id); - --- --- Name: metadata_position metadata_pkey01; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_position ADD CONSTRAINT metadata_pkey01 PRIMARY KEY (submission_id, attribute_id, tag_id, solution_id) WITH (fillfactor='100'); - --- --- Name: metadata_types metadata_types_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_types ADD CONSTRAINT metadata_types_pkey PRIMARY KEY (attribute_id); - --- --- Name: observation_types observation_types_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY observation_types ADD CONSTRAINT observation_types_pkey PRIMARY KEY (variable_id); - --- --- Name: observation_types observation_types_variable_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY observation_types ADD CONSTRAINT observation_types_variable_name_key UNIQUE (variable_name); - --- --- Name: proc_observations proc_observations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_pkey PRIMARY KEY (date_time, variable_id, submission_id); - --- --- Name: submission submission_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY submission ADD CONSTRAINT submission_pkey PRIMARY KEY (submission_id); - --- --- Name: data_histogram_bin_data_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_histogram_bin_data_date_time_index ON data_histogram_bin_data USING btree (date_time); - --- --- Name: data_histogram_bin_data_pos_date_time_index; Type: INDEX; Schema: 
public; Owner: postgres --- - CREATE INDEX data_histogram_bin_data_pos_date_time_index ON data_histogram_bin_data USING btree (position_date_time); - --- --- Name: data_position_date_time; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_position_date_time ON data_position USING btree (date_time); - --- --- Name: data_position_latlontime_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_position_latlontime_index ON data_position USING btree (submission_id, tag_id, solution_id, date_time, lat, lon, argos_location_class) WITH (fillfactor='100'); - --- --- Name: data_profile_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_profile_date_time_index ON data_profile USING btree (date_time); - --- --- Name: data_profile_pos_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_profile_pos_date_time_index ON data_profile USING btree (position_date_time); - --- --- Name: data_time_series_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_time_series_date_time_index ON data_time_series USING btree (date_time); - --- --- Name: data_time_series_pos_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_time_series_pos_date_time_index ON data_time_series USING btree (position_date_time); - --- --- Name: data_histogram_bin_data data_histogram_bin_data_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT data_histogram_bin_data_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: data_histogram_bin_info data_histogram_bin_info; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_info ADD CONSTRAINT data_histogram_bin_info FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- Name: 
data_position data_position_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_position ADD CONSTRAINT data_position_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: data_time_series data_time_series_data_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_time_series ADD CONSTRAINT data_time_series_data_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); - --- --- Name: data_time_series data_time_series_variable_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_time_series ADD CONSTRAINT data_time_series_variable_id_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- Name: data_histogram_bin_data datahistogrambindata_observationtypes_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT datahistogrambindata_observationtypes_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- Name: data_profile dataprofile_observationtypes_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_profile ADD CONSTRAINT dataprofile_observationtypes_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- Name: data_profile dataprofile_submission_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_profile ADD CONSTRAINT dataprofile_submission_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); - --- --- Name: data_histogram_bin_data histogrambindata_histogrambininfo_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT histogrambindata_histogrambininfo_fkey FOREIGN KEY (bin_id, bin_class) REFERENCES data_histogram_bin_info(bin_id, bin_class); - 
--- --- Name: metadata metadata_attribute_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata ADD CONSTRAINT metadata_attribute_id_fkey FOREIGN KEY (attribute_id) REFERENCES metadata_types(attribute_id); - --- --- Name: metadata_position metadata_attribute_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_position ADD CONSTRAINT metadata_attribute_id_fkey FOREIGN KEY (attribute_id) REFERENCES metadata_types(attribute_id); - --- --- Name: metadata metadata_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata ADD CONSTRAINT metadata_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: metadata_position metadata_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_position ADD CONSTRAINT metadata_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: proc_observations proc_observations_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: proc_observations proc_observations_variable_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_variable_id_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- PostgreSQL database dump complete --- +ALTER TABLE ONLY event_log + ADD CONSTRAINT event_log_submission_fkey FOREIGN KEY (submission_id, tag_id) REFERENCES submission(submission_id, tag_id); -- -- The following TRIGGER ensures that upon ingestion of an eTUFF file into tagbase-server, From d169b6af41160462b0340d408caa1ea1fd254f59 Mon Sep 17 
00:00:00 2001 From: Lewis John McGibbney Date: Sun, 15 Jan 2023 18:06:26 -0800 Subject: [PATCH 02/12] ISSUE-174 Create events_log table --- openapi.yaml | 10 ++++------ services/postgis/tagbase_schema.sql | 4 ++-- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/openapi.yaml b/openapi.yaml index cf1d2ac..c36107d 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -20,7 +20,7 @@ servers: - description: ICCAT Test tagbase-server url: https://162.13.162.49/tagbase/api/v0.7.0 tags: -- description: Events Logging +- description: Events Operations name: events - description: Ingestion operations name: ingest @@ -73,12 +73,10 @@ paths: x-openapi-router-controller: tagbase_server.controllers.events_controller put: description: Update notes for an event - operationId: put_tag + operationId: put_event parameters: + - $ref: '#/components/parameters/eventId' - $ref: '#/components/parameters/notes' - - $ref: '#/components/parameters/tagId' - - $ref: '#/components/parameters/subId' - - $ref: '#/components/parameters/version' responses: "200": content: @@ -92,7 +90,7 @@ paths: schema: $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. 
- summary: Update the 'notes' and/or 'version' associated with a tag submission + summary: Update the 'notes' associated with a event tags: - events x-openapi-router-controller: tagbase_server.controllers.events_controller diff --git a/services/postgis/tagbase_schema.sql b/services/postgis/tagbase_schema.sql index d3dea82..8697dc3 100644 --- a/services/postgis/tagbase_schema.sql +++ b/services/postgis/tagbase_schema.sql @@ -522,8 +522,8 @@ ALTER TABLE ONLY proc_observations ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_variable_id_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); -ALTER TABLE ONLY event_log - ADD CONSTRAINT event_log_submission_fkey FOREIGN KEY (submission_id, tag_id) REFERENCES submission(submission_id, tag_id); +ALTER TABLE ONLY events_log + ADD CONSTRAINT events_log_submission_fkey FOREIGN KEY (submission_id, tag_id) REFERENCES submission(submission_id, tag_id); -- -- The following TRIGGER ensures that upon ingestion of an eTUFF file into tagbase-server, From 0cfcf4ba5c20829cf2238031d00efcbd5183a76a Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Sun, 15 Jan 2023 18:07:15 -0800 Subject: [PATCH 03/12] ISSUE-174 Create events_log table --- openapi.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openapi.yaml b/openapi.yaml index c36107d..db6b9e2 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -20,7 +20,7 @@ servers: - description: ICCAT Test tagbase-server url: https://162.13.162.49/tagbase/api/v0.7.0 tags: -- description: Events Operations +- description: Event Operations name: events - description: Ingestion operations name: ingest From a9fa7b7a932cbf4608ebcb620bbf6421704c7f6b Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Wed, 25 Jan 2023 22:35:04 -0800 Subject: [PATCH 04/12] ISSUE-174 Create events_log table --- openapi.yaml | 234 ++++++++++++++++++++++++++++----------------------- 1 file changed, 129 insertions(+), 105 deletions(-) diff --git 
a/openapi.yaml b/openapi.yaml index db6b9e2..59a1bdc 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -30,7 +30,7 @@ paths: /events: get: description: Get information about all events - operationId: list_events + operationId: list_all_events responses: "200": content: @@ -245,10 +245,34 @@ paths: tags: - tags x-openapi-router-controller: tagbase_server.controllers.tags_controller + /tags/{tag_id}/subs/{sub_id}/events: + get: + description: Get all events for a given tag submission + operationId: list_events + parameters: + - $ref: '#/components/parameters/tagId' + - $ref: '#/components/parameters/subId' + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: Message confirming successful data update + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml + summary: Get all events for a given tag submission + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller components: parameters: eventId: - description: Numeric eventId + description: Numeric event ID explode: true in: path name: event_id @@ -280,7 +304,7 @@ components: type: string style: form subId: - description: Existing submission id for an existing tag + description: Numeric submission ID explode: true in: path name: sub_id @@ -288,7 +312,7 @@ components: schema: type: number tagId: - description: Existing tag id + description: Numeric tag ID explode: true in: path name: tag_id @@ -329,122 +353,120 @@ components: event200: description: Information for a given event example: - tag: - - date_time: '2022-04-01T04:58:21.319061+00:00' - filename: 'eTUFF-sailfish-117259_2.txt' - metadata: - person_owner: John Do - owner_contect: john@do.net - manufacturer: telemetry inc. - model: new_gen - attachment_method: anchor - notes: 'Ingested by admin on 2022-06-01 for Sailfish tagging campaign.' 
- submission_id: 5 - tag_id: 3 - version: '1' - - date_time: '2022-06-01T05:39:46.896088+00:00' - filename: 'eTUFF-sailfish-117259_2.txt' - metadata: - person_owner: Jane Do - owner_contect: jane@do.net - manufacturer: telemetry inc. - model: newer_gen - attachment_method: anchor - notes: 'Ingested by admin on 2022-06-01 for version 2 of the Sailfish tagging campaign.' - submission_id: 6 - tag_id: 3 - version: '2' + event_id: 1 + tag_id: 1 + submission_id: 1 + event_category: "submission" + event_name: "new tag submission" + time_start: "2022-04-01T04:58:21.319061+00:00" + time_end: "2022-04-01T04:59:21.319061+00:00" + duration: "0:01:00" + event_status: "finished" + event_notes: "Some user defined notes..." properties: - tag: - description: List containing one or more submissions for a given tag - items: - type: object - properties: - date_time: - description: Local datetime stamp at the time of eTUFF tag data file ingestion - example: '2022-04-01T04:58:21.319061+00:00' - #format: date - #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' - type: string - filename: - $ref: '#/components/schemas/filename' - metadata: - description: Contains the ingested tag metadata consistent with the eTUFF specification - example: - person_owner: John Do - owner_contect: john@do.net - manufacturer: telemetry inc. - model: new_gen - attachment_method: anchor - type: object - additionalProperties: - type: string - notes: - description: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solutionpositional meta/data) - example: "Ingested by admin on XXXX-XX-XX to back-process campaign XYZ." 
- type: string - submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase - example: 5 - type: integer - tag_id: - description: Unique numeric Tag ID associated with the ingested tag eTUFF data file - example: 3 - type: integer - version: - description: Version identifier for the eTUFF tag data file ingested - example: "1" - type: string - type: array + event_category: + description: ... + example: "..." + type: string + event_id: + $ref: '#/components/schemas/event_id' + event_name: + description: ... + example: "..." + type: string + event_notes: + description: Free-form text field where details of the event can be optionally entered by the client + example: "The event represents a data anomaly with file XYZ because of ..., this has been fixed in version ABC." + type: string + event_status: + description: Free-form text field where details of the event can be optionally entered by the client + enum: + - failed + - finished + - killed + - migration + - postmigration + - premigration + example: failed + type: string + time_start: + description: Local datetime stamp at the time of the event start + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + type: string + time_end: + description: Local datetime stamp at the time of the event end + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + type: string + duration: + description: The event duration e.g. different between 'time_start' and 'time_end' + example: "0:01:00" + type: string + submission_id: + $ref: '#/components/schemas/submission_id' + tag_id: + $ref: '#/components/schemas/tag_id' title: event200 type: object eventPut200: description: Event update response example: code: "200" - message: Tag '1' submission '5' successfully updated. 
+ message: Notes for event '1' successfully updated. properties: code: description: HTTP status code example: "200" type: string message: - description: A string detailing specifics of an HTTP operation - example: Tag '1' submission '5' successfully updated. + description: A string detailing specifics of the HTTP operation + example: Notes for event '1' successfully updated. type: string title: eventPut200 type: object + event_id: + description: Unique numeric event ID associated with the ingested tag data file + example: 1 + title: event_id + type: integer events200: description: Response detailing all available events example: - count: 2 - tags: - - tag_id: 1 - filename: eTUFF-sailfish-117259_2.txt - - tag_id: 2 - filename: eTUFF-sailfish-117259.txt + count: 3 + events: + - event_id: 1 + tag_id: 1 + submission_id: 1 + - event_id: 2 + tag_id: 1 + submission_id: 1 + - event_id: 3 + tag_id: 1 + submission_id: 1 properties: count: - description: Total count of unique tags - example: 2 + description: Total count of unique events + example: 3 type: integer - tags: - description: List of unique numeric Tag IDs and associated filename + events: + description: List of unique numeric event IDs and associated tag and submission IDs type: array items: - description: Unique numeric Tag ID associated with the ingested tag eTUFF data file + description: Unique numeric event ID and associated tag and submission IDs example: + event_id: 1 tag_id: 1 - filename: eTUFF-sailfish-117259_2.txt + submission_id: 1 properties: + event_id: + $ref: '#/components/schemas/event_id' tag_id: - description: Unique numeric Tag ID associated with the ingested tag data file - example: 1 - type: integer - filename: - description: Full name and extension of the ingested eTUFF tag data file - example: eTUFF-sailfish-117259.txt - type: string + $ref: '#/components/schemas/tag_id' + submission_id: + $ref: '#/components/schemas/submission_id' type: object title: tags200 type: object @@ -497,6 +519,11 @@ 
components: type: string title: response500 type: object + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer tag200: description: Information for a given tag example: @@ -527,7 +554,7 @@ components: version: '2' properties: tag: - description: List containing one or more submissions for a given tag + description: List containing submissions for a given tag items: type: object properties: @@ -555,13 +582,9 @@ components: example: "Ingested by admin on XXXX-XX-XX to back-process campaign XYZ." type: string submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase - example: 5 - type: integer + $ref: '#/components/schemas/submission_id' tag_id: - description: Unique numeric Tag ID associated with the ingested tag eTUFF data file - example: 3 - type: integer + $ref: '#/components/schemas/tag_id' version: description: Version identifier for the eTUFF tag data file ingested example: "1" @@ -585,6 +608,11 @@ components: type: string title: tagPut200 type: object + tag_id: + description: Unique numeric tag ID associated with the ingested tag data file + example: 1 + title: tag_id + type: integer tags200: description: Response detailing all available unique tags and associated filename example: @@ -609,13 +637,9 @@ components: filename: eTUFF-sailfish-117259_2.txt properties: tag_id: - description: Unique numeric Tag ID associated with the ingested tag data file - example: 1 - type: integer + $ref: '#/components/schemas/tag_id' filename: - description: Full name and extension of the ingested eTUFF tag data file - example: eTUFF-sailfish-117259.txt - type: string + $ref: '#/components/schemas/filename' type: object title: tags200 type: object From a4cf601db8ff0b6f91b21418d67514efe5e83b4b Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Thu, 26 Jan 2023 
02:57:24 -0800 Subject: [PATCH 05/12] ISSUE-174 Create events_log table --- docker-compose.yml | 7 +- openapi.yaml | 6 +- services/postgis/tagbase_schema.sql | 14 +- tagbase_server/.openapi-generator/FILES | 5 + tagbase_server/.openapi-generator/VERSION | 2 +- tagbase_server/README.md | 4 +- .../controllers/events_controller.py | 130 ++++++ .../controllers/ingest_controller.py | 13 +- .../controllers/tags_controller.py | 12 +- .../tagbase_server/models/__init__.py | 4 + .../tagbase_server/models/base_model_.py | 2 +- .../tagbase_server/models/event200.py | 344 +++++++++++++++ .../tagbase_server/models/event_put200.py | 88 ++++ .../tagbase_server/models/events200.py | 93 ++++ .../models/events200_events_inner.py | 118 +++++ .../tagbase_server/models/ingest200.py | 2 +- .../tagbase_server/models/response500.py | 6 +- .../tagbase_server/models/tag200.py | 6 +- .../tagbase_server/models/tag200_tag_inner.py | 6 +- .../tagbase_server/models/tag_put200.py | 2 +- .../tagbase_server/models/tags200.py | 2 +- .../models/tags200_tags_inner.py | 6 +- .../tagbase_server/openapi/openapi.yaml | 414 +++++++++++++++--- .../test/test_events_controller.py | 83 ++++ .../tagbase_server/utils/db_utils.py | 59 ++- .../tagbase_server/utils/io_utils.py | 2 - .../tagbase_server/utils/processing_utils.py | 23 +- 27 files changed, 1348 insertions(+), 105 deletions(-) create mode 100644 tagbase_server/tagbase_server/controllers/events_controller.py create mode 100644 tagbase_server/tagbase_server/models/event200.py create mode 100644 tagbase_server/tagbase_server/models/event_put200.py create mode 100644 tagbase_server/tagbase_server/models/events200.py create mode 100644 tagbase_server/tagbase_server/models/events200_events_inner.py create mode 100644 tagbase_server/tagbase_server/test/test_events_controller.py diff --git a/docker-compose.yml b/docker-compose.yml index 9a15b32..37fe9be 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,9 +18,11 @@ services: 
"docker_compose_diagram.icon": "docker" links: - postgis + networks: + - internal-network + restart: unless-stopped volumes: - ./dbbackups:/backups - restart: on-failure docker-cron: build: context: ./services/docker-cron @@ -108,8 +110,9 @@ services: postgis: environment: - ALLOW_IP_RANGE=0.0.0.0/0 + - EXTRA_CONF=log_destination = stderr\nlogging_collector = on - PASSWORD_AUTHENTICATION=md5 - - POSTGRES_DB=tagbase + #- POSTGRES_DB=tagbase - POSTGRES_PASS=${POSTGRES_PASSWORD} - POSTGRES_USER=tagbase - POSTGRES_MULTIPLE_EXTENSIONS=postgis,hstore,postgis_topology,postgis_raster,pgrouting diff --git a/openapi.yaml b/openapi.yaml index 59a1bdc..cfd7a62 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -90,7 +90,7 @@ paths: schema: $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. - summary: Update the 'notes' associated with a event + summary: Update the 'notes' associated with an event tags: - events x-openapi-router-controller: tagbase_server.controllers.events_controller @@ -384,9 +384,7 @@ components: - failed - finished - killed - - migration - - postmigration - - premigration + - running example: failed type: string time_start: diff --git a/services/postgis/tagbase_schema.sql b/services/postgis/tagbase_schema.sql index 8697dc3..949f2b5 100644 --- a/services/postgis/tagbase_schema.sql +++ b/services/postgis/tagbase_schema.sql @@ -24,17 +24,17 @@ SET default_table_access_method = heap; SET default_with_oids = false; -CREATE TYPE status_enum AS ENUM ('FAILED', 'FINISHED', 'KILLED', 'MIGRATION', 'POSTMIGRATION', 'PREMIGRATION'); +CREATE TYPE status_enum AS ENUM ('failed', 'finished', 'killed', 'running'); CREATE TABLE events_log ( - submission_id integer NOT NULL, - tag_id integer NOT NULL, - event_id integer NOT NULL, + submission_id integer, + tag_id integer, + event_id UUID NOT NULL, event_category character varying(30) NOT NULL, event_name character varying(30) NOT NULL, time_start 
timestamp(6) with time zone NOT NULL, - time_end timestamp(6) with time zone NOT NULL, - duration TIME NOT NULL, + time_end timestamp(6) with time zone, + duration double precision, event_status status_enum NOT NULL, event_notes text ); @@ -523,7 +523,7 @@ ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_variable_id_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); ALTER TABLE ONLY events_log - ADD CONSTRAINT events_log_submission_fkey FOREIGN KEY (submission_id, tag_id) REFERENCES submission(submission_id, tag_id); + ADD CONSTRAINT eventslog_submission_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); -- -- The following TRIGGER ensures that upon ingestion of an eTUFF file into tagbase-server, diff --git a/tagbase_server/.openapi-generator/FILES b/tagbase_server/.openapi-generator/FILES index ba6c8af..965acd0 100644 --- a/tagbase_server/.openapi-generator/FILES +++ b/tagbase_server/.openapi-generator/FILES @@ -4,12 +4,17 @@ README.md git_push.sh tagbase_server/__init__.py tagbase_server/controllers/__init__.py +tagbase_server/controllers/events_controller.py tagbase_server/controllers/ingest_controller.py tagbase_server/controllers/security_controller_.py tagbase_server/controllers/tags_controller.py tagbase_server/encoder.py tagbase_server/models/__init__.py tagbase_server/models/base_model_.py +tagbase_server/models/event200.py +tagbase_server/models/event_put200.py +tagbase_server/models/events200.py +tagbase_server/models/events200_events_inner.py tagbase_server/models/ingest200.py tagbase_server/models/response500.py tagbase_server/models/tag200.py diff --git a/tagbase_server/.openapi-generator/VERSION b/tagbase_server/.openapi-generator/VERSION index f4965a3..0df17dd 100644 --- a/tagbase_server/.openapi-generator/VERSION +++ b/tagbase_server/.openapi-generator/VERSION @@ -1 +1 @@ -6.0.0 \ No newline at end of file +6.2.1 \ No newline at end of file diff --git a/tagbase_server/README.md 
import logging

from tagbase_server.models.event200 import Event200  # noqa: E501
from tagbase_server.models.event_put200 import EventPut200  # noqa: E501
from tagbase_server.models.events200 import Events200  # noqa: E501
from tagbase_server.models.response500 import Response500  # noqa: E501,F401
from tagbase_server.utils.db_utils import connect
from tagbase_server import util  # noqa: F401

logger = logging.getLogger(__name__)

# Explicit column list so the positional tuple -> model-field mapping below is
# stable regardless of the physical column order of 'events_log'.  The
# original code used SELECT * and assumed event_category came first, which
# does not match the table DDL (submission_id, tag_id, event_id, ...).
_EVENT_COLUMNS = (
    "event_category, event_id, event_name, event_notes, event_status, "
    "time_start, time_end, duration, submission_id, tag_id"
)


def get_event(event_id):  # noqa: E501
    """Get information about an individual event

    Get information about an individual event  # noqa: E501

    :param event_id: Numeric event ID
    :type event_id:

    :rtype: Union[Event200, Tuple[Event200, int], Tuple[Event200, int, Dict[str, str]]
    """
    conn = connect()
    with conn:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT " + _EVENT_COLUMNS + " FROM events_log WHERE event_id = %s",
                (event_id,),
            )
            result = cur.fetchone()
            logger.info(result)
            # TODO(review): result is None when event_id does not exist;
            # confirm desired behavior (404?) — preserved as-is for now.
            return Event200.from_dict(
                {
                    "event_category": result[0],
                    "event_id": result[1],
                    "event_name": result[2],
                    "event_notes": result[3],
                    "event_status": result[4],
                    "time_start": result[5],
                    "time_end": result[6],
                    "duration": result[7],
                    "submission_id": result[8],
                    "tag_id": result[9],
                }
            )


def list_all_events():  # noqa: E501
    """Get information about all events

    Get information about all events  # noqa: E501


    :rtype: Union[Events200, Tuple[Events200, int], Tuple[Events200, int, Dict[str, str]]
    """
    conn = connect()
    with conn:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT DISTINCT event_id, tag_id, submission_id "
                "FROM events_log ORDER BY tag_id",
            )
            events = []
            for event in cur.fetchall():
                events.append(
                    {
                        "event_id": event[0],
                        "tag_id": event[1],
                        "submission_id": event[2],
                    }
                )
            cur.execute(
                "SELECT COUNT(DISTINCT event_id) FROM events_log",
            )
            count = cur.fetchone()[0]
            # Original returned the undefined name 'tags' here (NameError).
            return Events200.from_dict({"count": count, "events": events})


def list_events(tag_id, sub_id):  # noqa: E501
    """Get all events for a given tag submission

    Get all events for a given tag submission  # noqa: E501

    :param tag_id: Numeric tag ID
    :type tag_id:
    :param sub_id: Numeric submission ID
    :type sub_id:

    :rtype: Union[Events200, Tuple[Events200, int], Tuple[Events200, int, Dict[str, str]]
    """
    conn = connect()
    with conn:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT DISTINCT event_id, tag_id, submission_id "
                "FROM events_log WHERE tag_id = %s AND submission_id = %s ORDER BY tag_id",
                (tag_id, sub_id),
            )
            events = []
            for event in cur.fetchall():
                events.append(
                    {
                        "event_id": event[0],
                        "tag_id": event[1],
                        "submission_id": event[2],
                    }
                )
            # NOTE(review): this COUNT is over the whole table, not the
            # (tag_id, sub_id) filter above — confirm whether the filtered
            # count was intended.  Preserved as written.
            cur.execute(
                "SELECT COUNT(DISTINCT event_id) FROM events_log",
            )
            count = cur.fetchone()[0]
            # Original returned the undefined name 'tags' here (NameError).
            return Events200.from_dict({"count": count, "events": events})


def put_event(event_id, notes=None):  # noqa: E501
    """Update the 'notes' associated with an event

    Update notes for an event  # noqa: E501

    :param event_id: Numeric event ID
    :type event_id:
    :param notes: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data)
    :type notes: str

    :rtype: Union[EventPut200, Tuple[EventPut200, int], Tuple[EventPut200, int, Dict[str, str]]
    """
    conn = connect()
    with conn:
        with conn.cursor() as cur:
            if notes is not None:
                # Column is 'event_notes' in the events_log DDL; the original
                # statement targeted a non-existent 'notes' column.
                cur.execute(
                    "UPDATE events_log SET event_notes = %s WHERE event_id = %s",
                    (notes, event_id),
                )
    message = f"Event: '{int(event_id)}' successfully updated."
    return EventPut200.from_dict({"code": "200", "message": message})
submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data) :type notes: str :param type: Type of file to be ingested, defaults to 'etuff' :type type: str :param version: Version identifier for the eTUFF tag data file ingested :type version: str - :param body: Payload body - :type body: str :rtype: Union[Ingest200, Tuple[Ingest200, int], Tuple[Ingest200, int, Dict[str, str]] """ diff --git a/tagbase_server/tagbase_server/controllers/tags_controller.py b/tagbase_server/tagbase_server/controllers/tags_controller.py index 02dc4b4..80710fb 100644 --- a/tagbase_server/tagbase_server/controllers/tags_controller.py +++ b/tagbase_server/tagbase_server/controllers/tags_controller.py @@ -1,7 +1,9 @@ -from tagbase_server.utils.db_utils import connect - +from tagbase_server.models.response500 import Response500 # noqa: E501 from tagbase_server.models.tag200 import Tag200 # noqa: E501 from tagbase_server.models.tag_put200 import TagPut200 # noqa: E501 +from tagbase_server.models.tags200 import Tags200 # noqa: E501 +from tagbase_server.utils.db_utils import connect +from tagbase_server import util import logging @@ -13,7 +15,7 @@ def get_tag(tag_id): # noqa: E501 Get information about an individual tag # noqa: E501 - :param tag_id: Existing tag id + :param tag_id: Numeric tag ID :type tag_id: :rtype: Union[Tag200, Tuple[Tag200, int], Tuple[Tag200, int, Dict[str, str]] @@ -78,9 +80,9 @@ def put_tag(tag_id, sub_id, notes=None, version=None): # noqa: E501 Update a tag submission # noqa: E501 - :param tag_id: Existing tag id + :param tag_id: Numeric tag ID :type tag_id: - :param sub_id: Existing submission id for an existing tag + :param sub_id: Numeric submission ID :type sub_id: :param notes: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. 
class Event200(Model):
    """Model of a single row from the 'events_log' table.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech) and then hand-corrected: the
    'event_status' enum below is kept in sync with the openapi.yaml
    definition and the SQL 'status_enum' type
    ('failed', 'finished', 'killed', 'running').
    """

    def __init__(
        self,
        event_category=None,
        event_id=None,
        event_name=None,
        event_notes=None,
        event_status=None,
        time_start=None,
        time_end=None,
        duration=None,
        submission_id=None,
        tag_id=None,
    ):  # noqa: E501
        """Event200 - a model defined in OpenAPI

        :param event_category: The event_category of this Event200.  # noqa: E501
        :type event_category: str
        :param event_id: The event_id of this Event200.  # noqa: E501
        :type event_id: int
        :param event_name: The event_name of this Event200.  # noqa: E501
        :type event_name: str
        :param event_notes: The event_notes of this Event200.  # noqa: E501
        :type event_notes: str
        :param event_status: The event_status of this Event200.  # noqa: E501
        :type event_status: str
        :param time_start: The time_start of this Event200.  # noqa: E501
        :type time_start: str
        :param time_end: The time_end of this Event200.  # noqa: E501
        :type time_end: str
        :param duration: The duration of this Event200.  # noqa: E501
        :type duration: str
        :param submission_id: The submission_id of this Event200.  # noqa: E501
        :type submission_id: int
        :param tag_id: The tag_id of this Event200.  # noqa: E501
        :type tag_id: int
        """
        self.openapi_types = {
            "event_category": str,
            "event_id": int,
            "event_name": str,
            "event_notes": str,
            "event_status": str,
            "time_start": str,
            "time_end": str,
            "duration": str,
            "submission_id": int,
            "tag_id": int,
        }

        self.attribute_map = {
            "event_category": "event_category",
            "event_id": "event_id",
            "event_name": "event_name",
            "event_notes": "event_notes",
            "event_status": "event_status",
            "time_start": "time_start",
            "time_end": "time_end",
            "duration": "duration",
            "submission_id": "submission_id",
            "tag_id": "tag_id",
        }

        self._event_category = event_category
        self._event_id = event_id
        self._event_name = event_name
        self._event_notes = event_notes
        self._event_status = event_status
        self._time_start = time_start
        self._time_end = time_end
        self._duration = duration
        self._submission_id = submission_id
        self._tag_id = tag_id

    @classmethod
    def from_dict(cls, dikt) -> "Event200":
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The event200 of this Event200.  # noqa: E501
        :rtype: Event200
        """
        return util.deserialize_model(dikt, cls)

    @property
    def event_category(self):
        """Gets the event_category of this Event200.

        Category of the event, e.g. "submission"  # noqa: E501

        :return: The event_category of this Event200.
        :rtype: str
        """
        return self._event_category

    @event_category.setter
    def event_category(self, event_category):
        """Sets the event_category of this Event200.

        Category of the event, e.g. "submission"  # noqa: E501

        :param event_category: The event_category of this Event200.
        :type event_category: str
        """

        self._event_category = event_category

    @property
    def event_id(self):
        """Gets the event_id of this Event200.

        Unique numeric event ID associated with the ingested tag data file  # noqa: E501

        NOTE(review): the events_log DDL declares event_id as UUID while the
        API models it as int — confirm which type is authoritative.

        :return: The event_id of this Event200.
        :rtype: int
        """
        return self._event_id

    @event_id.setter
    def event_id(self, event_id):
        """Sets the event_id of this Event200.

        Unique numeric event ID associated with the ingested tag data file  # noqa: E501

        :param event_id: The event_id of this Event200.
        :type event_id: int
        """

        self._event_id = event_id

    @property
    def event_name(self):
        """Gets the event_name of this Event200.

        Name of the event, e.g. "new tag submission"  # noqa: E501

        :return: The event_name of this Event200.
        :rtype: str
        """
        return self._event_name

    @event_name.setter
    def event_name(self, event_name):
        """Sets the event_name of this Event200.

        Name of the event, e.g. "new tag submission"  # noqa: E501

        :param event_name: The event_name of this Event200.
        :type event_name: str
        """

        self._event_name = event_name

    @property
    def event_notes(self):
        """Gets the event_notes of this Event200.

        Free-form text field where details of the event can be optionally entered by the client  # noqa: E501

        :return: The event_notes of this Event200.
        :rtype: str
        """
        return self._event_notes

    @event_notes.setter
    def event_notes(self, event_notes):
        """Sets the event_notes of this Event200.

        Free-form text field where details of the event can be optionally entered by the client  # noqa: E501

        :param event_notes: The event_notes of this Event200.
        :type event_notes: str
        """

        self._event_notes = event_notes

    @property
    def event_status(self):
        """Gets the event_status of this Event200.

        Status of the event; one of the allowed enum values  # noqa: E501

        :return: The event_status of this Event200.
        :rtype: str
        """
        return self._event_status

    @event_status.setter
    def event_status(self, event_status):
        """Sets the event_status of this Event200.

        Status of the event; one of the allowed enum values  # noqa: E501

        :param event_status: The event_status of this Event200.
        :type event_status: str
        """
        # Kept in sync with openapi.yaml and the SQL status_enum; the
        # migration/postmigration/premigration values were replaced by
        # 'running' in this schema revision.
        allowed_values = [
            "failed",
            "finished",
            "killed",
            "running",
        ]  # noqa: E501
        if event_status not in allowed_values:
            raise ValueError(
                "Invalid value for `event_status` ({0}), must be one of {1}".format(
                    event_status, allowed_values
                )
            )

        self._event_status = event_status

    @property
    def time_start(self):
        """Gets the time_start of this Event200.

        Local datetime stamp at the time of the event start  # noqa: E501

        :return: The time_start of this Event200.
        :rtype: str
        """
        return self._time_start

    @time_start.setter
    def time_start(self, time_start):
        """Sets the time_start of this Event200.

        Local datetime stamp at the time of the event start  # noqa: E501

        :param time_start: The time_start of this Event200.
        :type time_start: str
        """

        self._time_start = time_start

    @property
    def time_end(self):
        """Gets the time_end of this Event200.

        Local datetime stamp at the time of the event end  # noqa: E501

        :return: The time_end of this Event200.
        :rtype: str
        """
        return self._time_end

    @time_end.setter
    def time_end(self, time_end):
        """Sets the time_end of this Event200.

        Local datetime stamp at the time of the event end  # noqa: E501

        :param time_end: The time_end of this Event200.
        :type time_end: str
        """

        self._time_end = time_end

    @property
    def duration(self):
        """Gets the duration of this Event200.

        The event duration, e.g. difference between 'time_start' and 'time_end'  # noqa: E501

        :return: The duration of this Event200.
        :rtype: str
        """
        return self._duration

    @duration.setter
    def duration(self, duration):
        """Sets the duration of this Event200.

        The event duration, e.g. difference between 'time_start' and 'time_end'  # noqa: E501

        :param duration: The duration of this Event200.
        :type duration: str
        """

        self._duration = duration

    @property
    def submission_id(self):
        """Gets the submission_id of this Event200.

        Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase  # noqa: E501

        :return: The submission_id of this Event200.
        :rtype: int
        """
        return self._submission_id

    @submission_id.setter
    def submission_id(self, submission_id):
        """Sets the submission_id of this Event200.

        Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase  # noqa: E501

        :param submission_id: The submission_id of this Event200.
        :type submission_id: int
        """

        self._submission_id = submission_id

    @property
    def tag_id(self):
        """Gets the tag_id of this Event200.

        Unique numeric tag ID associated with the ingested tag data file  # noqa: E501

        :return: The tag_id of this Event200.
        :rtype: int
        """
        return self._tag_id

    @tag_id.setter
    def tag_id(self, tag_id):
        """Sets the tag_id of this Event200.

        Unique numeric tag ID associated with the ingested tag data file  # noqa: E501

        :param tag_id: The tag_id of this Event200.
        :type tag_id: int
        """

        self._tag_id = tag_id
+ """ + + def __init__(self, code=None, message=None): # noqa: E501 + """EventPut200 - a model defined in OpenAPI + + :param code: The code of this EventPut200. # noqa: E501 + :type code: str + :param message: The message of this EventPut200. # noqa: E501 + :type message: str + """ + self.openapi_types = {"code": str, "message": str} + + self.attribute_map = {"code": "code", "message": "message"} + + self._code = code + self._message = message + + @classmethod + def from_dict(cls, dikt) -> "EventPut200": + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The eventPut200 of this EventPut200. # noqa: E501 + :rtype: EventPut200 + """ + return util.deserialize_model(dikt, cls) + + @property + def code(self): + """Gets the code of this EventPut200. + + HTTP status code # noqa: E501 + + :return: The code of this EventPut200. + :rtype: str + """ + return self._code + + @code.setter + def code(self, code): + """Sets the code of this EventPut200. + + HTTP status code # noqa: E501 + + :param code: The code of this EventPut200. + :type code: str + """ + + self._code = code + + @property + def message(self): + """Gets the message of this EventPut200. + + A string detailing specifics of the HTTP operation # noqa: E501 + + :return: The message of this EventPut200. + :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this EventPut200. + + A string detailing specifics of the HTTP operation # noqa: E501 + + :param message: The message of this EventPut200. 
+ :type message: str + """ + + self._message = message diff --git a/tagbase_server/tagbase_server/models/events200.py b/tagbase_server/tagbase_server/models/events200.py new file mode 100644 index 0000000..36b67ad --- /dev/null +++ b/tagbase_server/tagbase_server/models/events200.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from tagbase_server.models.base_model_ import Model +from tagbase_server.models.events200_events_inner import Events200EventsInner +from tagbase_server import util + +from tagbase_server.models.events200_events_inner import ( + Events200EventsInner, +) # noqa: E501 + + +class Events200(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, count=None, events=None): # noqa: E501 + """Events200 - a model defined in OpenAPI + + :param count: The count of this Events200. # noqa: E501 + :type count: int + :param events: The events of this Events200. # noqa: E501 + :type events: List[Events200EventsInner] + """ + self.openapi_types = {"count": int, "events": List[Events200EventsInner]} + + self.attribute_map = {"count": "count", "events": "events"} + + self._count = count + self._events = events + + @classmethod + def from_dict(cls, dikt) -> "Events200": + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The events200 of this Events200. # noqa: E501 + :rtype: Events200 + """ + return util.deserialize_model(dikt, cls) + + @property + def count(self): + """Gets the count of this Events200. + + Total count of unique events # noqa: E501 + + :return: The count of this Events200. + :rtype: int + """ + return self._count + + @count.setter + def count(self, count): + """Sets the count of this Events200. 
class Events200EventsInner(Model):
    """One entry of the /events listing: an event ID plus the tag and
    submission IDs it belongs to.

    (Originally produced by OpenAPI Generator.)
    """

    def __init__(self, event_id=None, tag_id=None, submission_id=None):  # noqa: E501
        """Create an Events200EventsInner.

        :param event_id: The event_id of this Events200EventsInner.  # noqa: E501
        :type event_id: int
        :param tag_id: The tag_id of this Events200EventsInner.  # noqa: E501
        :type tag_id: int
        :param submission_id: The submission_id of this Events200EventsInner.  # noqa: E501
        :type submission_id: int
        """
        # Serialization metadata consumed by the generated Model base class.
        self.openapi_types = {
            "event_id": int,
            "tag_id": int,
            "submission_id": int,
        }
        self.attribute_map = {
            "event_id": "event_id",
            "tag_id": "tag_id",
            "submission_id": "submission_id",
        }

        self._event_id = event_id
        self._tag_id = tag_id
        self._submission_id = submission_id

    @classmethod
    def from_dict(cls, dikt) -> "Events200EventsInner":
        """Deserialize a plain dict into an Events200EventsInner instance.

        :param dikt: A dict.
        :type: dict
        :return: The events200_events_inner of this Events200EventsInner.
        :rtype: Events200EventsInner
        """
        return util.deserialize_model(dikt, cls)

    @property
    def event_id(self):
        """Unique numeric event ID associated with the ingested tag data file.

        :rtype: int
        """
        return self._event_id

    @event_id.setter
    def event_id(self, event_id):
        """Set the event ID.

        :type event_id: int
        """
        self._event_id = event_id

    @property
    def tag_id(self):
        """Unique numeric tag ID associated with the ingested tag data file.

        :rtype: int
        """
        return self._tag_id

    @tag_id.setter
    def tag_id(self, tag_id):
        """Set the tag ID.

        :type tag_id: int
        """
        self._tag_id = tag_id

    @property
    def submission_id(self):
        """Unique numeric ID assigned upon submission of a tag eTUFF data
        file for ingest/importation into Tagbase.

        :rtype: int
        """
        return self._submission_id

    @submission_id.setter
    def submission_id(self, submission_id):
        """Set the submission ID.

        :type submission_id: int
        """
        self._submission_id = submission_id
# noqa: E501 + Additional details (if available) to diagnose the 500 response # noqa: E501 :return: The more_info of this Response500. :rtype: str @@ -120,7 +120,7 @@ def more_info(self): def more_info(self, more_info): """Sets the more_info of this Response500. - Additional details (if available) to diagnose the 500 response. # noqa: E501 + Additional details (if available) to diagnose the 500 response # noqa: E501 :param more_info: The more_info of this Response500. :type more_info: str diff --git a/tagbase_server/tagbase_server/models/tag200.py b/tagbase_server/tagbase_server/models/tag200.py index 70b67b0..7698fe7 100644 --- a/tagbase_server/tagbase_server/models/tag200.py +++ b/tagbase_server/tagbase_server/models/tag200.py @@ -36,7 +36,7 @@ def from_dict(cls, dikt) -> "Tag200": :param dikt: A dict. :type: dict - :return: The Tag200 of this Tag200. # noqa: E501 + :return: The tag200 of this Tag200. # noqa: E501 :rtype: Tag200 """ return util.deserialize_model(dikt, cls) @@ -45,7 +45,7 @@ def from_dict(cls, dikt) -> "Tag200": def tag(self): """Gets the tag of this Tag200. - List containing one or more submissions for a given tag # noqa: E501 + List containing submissions for a given tag # noqa: E501 :return: The tag of this Tag200. :rtype: List[Tag200TagInner] @@ -56,7 +56,7 @@ def tag(self): def tag(self, tag): """Sets the tag of this Tag200. - List containing one or more submissions for a given tag # noqa: E501 + List containing submissions for a given tag # noqa: E501 :param tag: The tag of this Tag200. :type tag: List[Tag200TagInner] diff --git a/tagbase_server/tagbase_server/models/tag200_tag_inner.py b/tagbase_server/tagbase_server/models/tag200_tag_inner.py index a46eae9..3f53775 100644 --- a/tagbase_server/tagbase_server/models/tag200_tag_inner.py +++ b/tagbase_server/tagbase_server/models/tag200_tag_inner.py @@ -76,7 +76,7 @@ def from_dict(cls, dikt) -> "Tag200TagInner": :param dikt: A dict. 
:type: dict - :return: The Tag200_tag_inner of this Tag200TagInner. # noqa: E501 + :return: The tag200_tag_inner of this Tag200TagInner. # noqa: E501 :rtype: Tag200TagInner """ return util.deserialize_model(dikt, cls) @@ -200,7 +200,7 @@ def submission_id(self, submission_id): def tag_id(self): """Gets the tag_id of this Tag200TagInner. - Unique numeric Tag ID associated with the ingested tag eTUFF data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :return: The tag_id of this Tag200TagInner. :rtype: int @@ -211,7 +211,7 @@ def tag_id(self): def tag_id(self, tag_id): """Sets the tag_id of this Tag200TagInner. - Unique numeric Tag ID associated with the ingested tag eTUFF data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :param tag_id: The tag_id of this Tag200TagInner. :type tag_id: int diff --git a/tagbase_server/tagbase_server/models/tag_put200.py b/tagbase_server/tagbase_server/models/tag_put200.py index 1f604cd..96fcf9f 100644 --- a/tagbase_server/tagbase_server/models/tag_put200.py +++ b/tagbase_server/tagbase_server/models/tag_put200.py @@ -36,7 +36,7 @@ def from_dict(cls, dikt) -> "TagPut200": :param dikt: A dict. :type: dict - :return: The TagPut200 of this TagPut200. # noqa: E501 + :return: The tagPut200 of this TagPut200. # noqa: E501 :rtype: TagPut200 """ return util.deserialize_model(dikt, cls) diff --git a/tagbase_server/tagbase_server/models/tags200.py b/tagbase_server/tagbase_server/models/tags200.py index 65ab227..721dda2 100644 --- a/tagbase_server/tagbase_server/models/tags200.py +++ b/tagbase_server/tagbase_server/models/tags200.py @@ -39,7 +39,7 @@ def from_dict(cls, dikt) -> "Tags200": :param dikt: A dict. :type: dict - :return: The Tags200 of this Tags200. # noqa: E501 + :return: The tags200 of this Tags200. 
# noqa: E501 :rtype: Tags200 """ return util.deserialize_model(dikt, cls) diff --git a/tagbase_server/tagbase_server/models/tags200_tags_inner.py b/tagbase_server/tagbase_server/models/tags200_tags_inner.py index f6819b3..2076970 100644 --- a/tagbase_server/tagbase_server/models/tags200_tags_inner.py +++ b/tagbase_server/tagbase_server/models/tags200_tags_inner.py @@ -36,7 +36,7 @@ def from_dict(cls, dikt) -> "Tags200TagsInner": :param dikt: A dict. :type: dict - :return: The Tags200_tags_inner of this Tags200TagsInner. # noqa: E501 + :return: The tags200_tags_inner of this Tags200TagsInner. # noqa: E501 :rtype: Tags200TagsInner """ return util.deserialize_model(dikt, cls) @@ -45,7 +45,7 @@ def from_dict(cls, dikt) -> "Tags200TagsInner": def tag_id(self): """Gets the tag_id of this Tags200TagsInner. - Unique numeric Tag ID associated with the ingested tag data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :return: The tag_id of this Tags200TagsInner. :rtype: int @@ -56,7 +56,7 @@ def tag_id(self): def tag_id(self, tag_id): """Sets the tag_id of this Tags200TagsInner. - Unique numeric Tag ID associated with the ingested tag data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :param tag_id: The tag_id of this Tags200TagsInner. :type tag_id: int diff --git a/tagbase_server/tagbase_server/openapi/openapi.yaml b/tagbase_server/tagbase_server/openapi/openapi.yaml index 2edd778..f9f6ddd 100644 --- a/tagbase_server/tagbase_server/openapi/openapi.yaml +++ b/tagbase_server/tagbase_server/openapi/openapi.yaml @@ -4,10 +4,10 @@ info: email: tagtuna@gmail.com name: Tagbase Dev Team url: https://github.com/tagbase/tagbase-server/issues - description: "tagbse-server provides HTTP endpoints for ingestion of various files\ - \ \\\ninto a Tagbase SQL database. 
Input file support currently includes eTUFF\ - \ (see [here](https://doi.org/10.6084/m9.figshare.10032848.v4) \\\nand [here](https://doi.org/10.6084/m9.figshare.10159820.v1)).\ - \ The REST API complies with [OpenAPI v3.0.3](https://spec.openapis.org/oas/v3.0.3.html).\n" + description: | + tagbse-server provides HTTP endpoints for ingestion of various files \ + into a Tagbase SQL database. Input file support currently includes eTUFF (see [here](https://doi.org/10.6084/m9.figshare.10032848.v4) \ + and [here](https://doi.org/10.6084/m9.figshare.10159820.v1)). license: name: Apache License v2.0 url: https://www.apache.org/licenses/LICENSE-2.0 @@ -20,11 +20,107 @@ servers: - description: ICCAT Test tagbase-server url: https://162.13.162.49/tagbase/api/v0.7.0 tags: -- description: Ingestion operations. +- description: Event Operations + name: events +- description: Ingestion operations name: ingest -- description: Tag Operations. +- description: Tag Operations name: tags paths: + /events: + get: + description: Get information about all events + operationId: list_all_events + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: A list containing all events. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. 
+ summary: Get information about all events + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + /events/{event_id}: + get: + description: Get information about an individual event + operationId: get_event + parameters: + - description: Numeric event ID + explode: true + in: path + name: event_id + required: true + schema: + type: number + style: simple + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/event200' + description: Information about an individual tag + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Get information about an individual event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + put: + description: Update notes for an event + operationId: put_event + parameters: + - description: Numeric event ID + explode: true + in: path + name: event_id + required: true + schema: + type: number + style: simple + - description: "Free-form text field where details of submitted eTUFF file for\ + \ ingest can be provided e.g. submitter name, etuff data contents (tag metadata\ + \ and measurements + primary position data, or just secondary solution-positional\ + \ meta/data)" + explode: true + in: query + name: notes + required: false + schema: + maxLength: 10000 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/eventPut200' + description: A success message confirming ingestion. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. 
+ summary: Update the 'notes' associated with a event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller /ingest: get: description: Get network accessible file and execute ingestion @@ -84,13 +180,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get network accessible file and execute ingestion tags: @@ -100,7 +196,8 @@ paths: description: Post a local file and perform a ingest operation operationId: ingest_post parameters: - - description: "Free-form text field to explicitly define the name of the file to be persisted" + - description: Free-form text field to explicitly define the name of the file + to be persisted explode: true in: query name: filename @@ -152,7 +249,7 @@ paths: content: application/octet-stream: schema: - description: compressed binary file containing one or more eTUFF files + description: Compressed binary file containing one or more eTUFF files format: binary maxLength: 1000000000 minLength: 1 @@ -171,13 +268,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Post a local file and perform a ingest operation tags: @@ -193,13 +290,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tags200' - description: A success message confirming ingestion. 
+ $ref: '#/components/schemas/tags200' + description: A list of all tags. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about all tags tags: @@ -210,7 +307,7 @@ paths: description: Get information about an individual tag operationId: get_tag parameters: - - description: Existing tag id + - description: Numeric tag ID explode: true in: path name: tag_id @@ -223,13 +320,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tag200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tag200' + description: Information about an individual tag. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about an individual tag tags: @@ -254,7 +351,7 @@ paths: pattern: ^(?!\s*$).+ type: string style: form - - description: Existing tag id + - description: Numeric tag ID explode: true in: path name: tag_id @@ -262,7 +359,7 @@ paths: schema: type: number style: simple - - description: Existing submission id for an existing tag + - description: Numeric submission ID explode: true in: path name: sub_id @@ -286,22 +383,70 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TagPut200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tagPut200' + description: Message confirming successful data update "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' - description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. 
Contact admin detailed in openapi.yaml summary: Update the 'notes' and/or 'version' associated with a tag submission tags: - tags x-openapi-router-controller: tagbase_server.controllers.tags_controller + /tags/{tag_id}/subs/{sub_id}/events: + get: + description: Get all events for a given tag submission + operationId: list_events + parameters: + - description: Numeric tag ID + explode: true + in: path + name: tag_id + required: true + schema: + type: number + style: simple + - description: Numeric submission ID + explode: true + in: path + name: sub_id + required: true + schema: + type: number + style: simple + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: Message confirming successful data update + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml + summary: Get all events for a given tag submission + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller components: parameters: + eventId: + description: Numeric event ID + explode: true + in: path + name: event_id + required: true + schema: + type: number + style: simple filename: - description: Free-form text field to explicitly define the name of the file to be persisted + description: Free-form text field to explicitly define the name of the file + to be persisted explode: true in: query name: filename @@ -328,7 +473,7 @@ components: type: string style: form subId: - description: Existing submission id for an existing tag + description: Numeric submission ID explode: true in: path name: sub_id @@ -337,7 +482,7 @@ components: type: number style: simple tagId: - description: Existing tag id + description: Numeric tag ID explode: true in: path name: tag_id @@ -376,13 +521,134 @@ components: example: eTUFF-sailfish-117259_2.txt title: filename type: string - Ingest200: + event200: + 
description: Information for a given event + example: + event_id: 1 + tag_id: 1 + submission_id: 1 + event_category: submission + event_name: new tag submission + time_start: 2022-04-01T04:58:21.319061+00:00 + time_end: 2022-04-01T04:59:21.319061+00:00 + duration: 0:01:00 + event_status: finished + event_notes: Some user defined notes... + properties: + event_category: + description: '...' + example: '...' + type: string + event_id: + description: Unique numeric event ID associated with the ingested tag data + file + example: 1 + title: event_id + type: integer + event_name: + description: '...' + example: '...' + type: string + event_notes: + description: Free-form text field where details of the event can be optionally + entered by the client + example: "The event represents a data anomaly with file XYZ because of ...,\ + \ this has been fixed in version ABC." + type: string + event_status: + description: Free-form text field where details of the event can be optionally + entered by the client + enum: + - failed + - finished + - killed + - migration + - postmigration + - premigration + example: failed + type: string + time_start: + description: Local datetime stamp at the time of the event start + example: 2022-04-01T04:58:21.319061+00:00 + type: string + time_end: + description: Local datetime stamp at the time of the event end + example: 2022-04-01T04:58:21.319061+00:00 + type: string + duration: + description: The event duration e.g. 
different between 'time_start' and + 'time_end' + example: 0:01:00 + type: string + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + tag_id: + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer + title: event200 + type: object + eventPut200: + description: Event update response + example: + code: "200" + message: Notes for event '1' successfully updated. + properties: + code: + description: HTTP status code + example: "200" + type: string + message: + description: A string detailing specifics of the HTTP operation + example: Notes for event '1' successfully updated. + type: string + title: eventPut200 + type: object + event_id: + description: Unique numeric event ID associated with the ingested tag data file + example: 1 + title: event_id + type: integer + events200: + description: Response detailing all available events + example: + count: 3 + events: + - event_id: 1 + tag_id: 1 + submission_id: 1 + - event_id: 2 + tag_id: 1 + submission_id: 1 + - event_id: 3 + tag_id: 1 + submission_id: 1 + properties: + count: + description: Total count of unique events + example: 3 + type: integer + events: + description: List of unique numeric event IDs and associated tag and submission + IDs + items: + $ref: '#/components/schemas/events200_events_inner' + type: array + title: tags200 + type: object + ingest200: description: HTTP 200 success response example: code: "200" elapsed: 0:00:06.506691 message: Data file eTUFF-sailfish-117259.txt successfully ingested into Tagbase - DB. + DB properties: code: description: HTTP status code @@ -394,11 +660,11 @@ components: message: description: A string detailing specifics of an HTTP operation example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. 
+ Tagbase DB type: string - title: Ingest200 + title: ingest200 type: object - Response500: + response500: description: 500 Internal Server Error example: code: "200" @@ -413,19 +679,25 @@ components: message: description: A string detailing specifics of the HTTP 500 response example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. + Tagbase DB type: string more_info: - description: Additional details (if available) to diagnose the 500 response. + description: Additional details (if available) to diagnose the 500 response example: https://httpwg.org/specs/rfc7231.html#status.500 type: string trace: description: Trace diagnostic information related to the response example: 123e4567-e89b-12d3-a456-426614174000 type: string - title: Response500 + title: response500 type: object - Tag200: + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + tag200: description: Information for a given tag example: tag: @@ -456,13 +728,13 @@ components: version: "2" properties: tag: - description: List containing one or more submissions for a given tag + description: List containing submissions for a given tag items: - $ref: '#/components/schemas/Tag200_tag_inner' + $ref: '#/components/schemas/tag200_tag_inner' type: array - title: Tag200 + title: tag200 type: object - TagPut200: + tagPut200: description: HTTP 200 success response example: code: "200" @@ -476,9 +748,14 @@ components: description: A string detailing specifics of an HTTP operation example: Tag '1' submission '5' successfully updated. 
type: string - title: TagPut200 + title: tagPut200 type: object - Tags200: + tag_id: + description: Unique numeric tag ID associated with the ingested tag data file + example: 1 + title: tag_id + type: integer + tags200: description: Response detailing all available unique tags and associated filename example: count: 2 @@ -495,11 +772,38 @@ components: tags: description: List of unique numeric Tag IDs and associated filename items: - $ref: '#/components/schemas/Tags200_tags_inner' + $ref: '#/components/schemas/tags200_tags_inner' type: array - title: Tags200 + title: tags200 + type: object + events200_events_inner: + description: Unique numeric event ID and associated tag and submission IDs + example: + event_id: 1 + tag_id: 1 + submission_id: 1 + properties: + event_id: + description: Unique numeric event ID associated with the ingested tag data + file + example: 1 + title: event_id + type: integer + tag_id: + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + title: events200_events_inner type: object - Tag200_tag_inner: + tag200_tag_inner: properties: date_time: description: Local datetime stamp at the time of eTUFF tag data file ingestion @@ -539,9 +843,9 @@ components: title: submission_id type: integer tag_id: - description: Unique numeric Tag ID associated with the ingested tag eTUFF - data file - example: 3 + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 title: tag_id type: integer version: @@ -549,9 +853,9 @@ components: example: "1" title: version type: string - title: Tag200_tag_inner + title: tag200_tag_inner type: object - Tags200_tags_inner: + tags200_tags_inner: description: Unique numeric Tag ID associated with the ingested tag eTUFF data 
file example: @@ -559,13 +863,15 @@ components: filename: eTUFF-sailfish-117259_2.txt properties: tag_id: - description: Unique numeric Tag ID associated with the ingested tag data + description: Unique numeric tag ID associated with the ingested tag data file example: 1 + title: tag_id type: integer filename: description: Full name and extension of the ingested eTUFF tag data file - example: eTUFF-sailfish-117259.txt + example: eTUFF-sailfish-117259_2.txt + title: filename type: string - title: Tags200_tags_inner + title: tags200_tags_inner type: object diff --git a/tagbase_server/tagbase_server/test/test_events_controller.py b/tagbase_server/tagbase_server/test/test_events_controller.py new file mode 100644 index 0000000..705ae50 --- /dev/null +++ b/tagbase_server/tagbase_server/test/test_events_controller.py @@ -0,0 +1,83 @@ +# coding: utf-8 + +from __future__ import absolute_import +import unittest + +from flask import json +from six import BytesIO + +from tagbase_server.models.event200 import Event200 # noqa: E501 +from tagbase_server.models.event_put200 import EventPut200 # noqa: E501 +from tagbase_server.models.events200 import Events200 # noqa: E501 +from tagbase_server.models.response500 import Response500 # noqa: E501 +from tagbase_server.test import BaseTestCase + + +class TestEventsController(BaseTestCase): + """EventsController integration test stubs""" + + def test_get_event(self): + """Test case for get_event + + Get information about an individual event + """ + headers = { + "Accept": "application/json", + } + response = self.client.open( + "/tagbase/api/v0.7.0/events/{event_id}".format(event_id=3.4), + method="GET", + headers=headers, + ) + self.assert500(response, "Response body is : " + response.data.decode("utf-8")) + + def test_list_all_events(self): + """Test case for list_all_events + + Get information about all events + """ + headers = { + "Accept": "application/json", + } + response = self.client.open( + "/tagbase/api/v0.7.0/events", 
method="GET", headers=headers + ) + self.assert500(response, "Response body is : " + response.data.decode("utf-8")) + + def test_list_events(self): + """Test case for list_events + + Get all events for a given tag submission + """ + headers = { + "Accept": "application/json", + } + response = self.client.open( + "/tagbase/api/v0.7.0/tags/{tag_id}/subs/{sub_id}/events".format( + tag_id=3.4, sub_id=3.4 + ), + method="GET", + headers=headers, + ) + self.assert500(response, "Response body is : " + response.data.decode("utf-8")) + + def test_put_event(self): + """Test case for put_event + + Update the 'notes' associated with a event + """ + query_string = [("notes", "notes_example")] + headers = { + "Accept": "application/json", + } + response = self.client.open( + "/tagbase/api/v0.7.0/events/{event_id}".format(event_id=3.4), + method="PUT", + headers=headers, + query_string=query_string, + ) + self.assert500(response, "Response body is : " + response.data.decode("utf-8")) + + +if __name__ == "__main__": + unittest.main() diff --git a/tagbase_server/tagbase_server/utils/db_utils.py b/tagbase_server/tagbase_server/utils/db_utils.py index eb19ae8..ab2e1fa 100644 --- a/tagbase_server/tagbase_server/utils/db_utils.py +++ b/tagbase_server/tagbase_server/utils/db_utils.py @@ -13,7 +13,7 @@ def connect(): if they occur. 
:rtype: connection """ - logger.debug("Attempting connection to TagbaseDB...") + logger.info("Attempting connection to TagbaseDB...") try: conn = psycopg2.connect( dbname="tagbase", @@ -28,12 +28,65 @@ def connect(): { "code": "500", "message": "Encountered psycopg2.OperationalError when attempting to establish a connection " - "to the Tagbase PostgreSQL database.", + "to the Tagbase database.", "more_info": "Contact the service administrator - {email}".format( email=os.getenv("PGADMIN_DEFAULT_EMAIL") ), "trace": poe, } ) - logger.debug("Successfully connected to TagbaseDB.") + logger.info("Successfully connected to TagbaseDB.") return conn + + +def create_event(event_category=None, event_id=None, event_name=None, event_status=None, time_start=None): + """ + Create a new event in the events_log table. Note the event_id UUID is not automatically generated. + It must be passed to this function call. + """ + logger.info("Creating new event: %s in events log...", event_id) + event_conn = connect() + with event_conn: + with event_conn.cursor() as event_cur: + event_cur.execute( + "INSERT INTO events_log (event_id, event_category, event_name, time_start, event_status) " + "VALUES %s, %s, %s, %s, %s)", + ( + event_id, + event_category, + event_name, + time_start, + event_status + ), + ) + logger.info( + "Successfully created new event: '%s'", + event_id, + ) + event_conn.commit() + event_cur.close() + event_conn.close() + + +def update_event(duration=None, event_id=None, event_status=None, submission_id=None, tag_id=None, time_end=None): + """ + Update existing event in the events_log table with new data. 
+ """ + logger.debug("Updating event: '%s' in events log...", event_id,) + conn = connect() + with conn: + with conn.cursor() as cur: + cur.execute( + "UPDATE events_log " + "SET submission_id = %s, tag_id = %s, event_id = %s, time_end = %s, duration = %s, event_status = %s" + " WHERE event_id = %s", + (submission_id, tag_id, event_id, time_end, duration, event_status, event_id), + ) + logger.info( + "Successfully updated event: '%s'", + event_id, + ) + conn.commit() + cur.close() + conn.close() + diff --git a/tagbase_server/tagbase_server/utils/io_utils.py b/tagbase_server/tagbase_server/utils/io_utils.py index f9e67a1..438c0be 100644 --- a/tagbase_server/tagbase_server/utils/io_utils.py +++ b/tagbase_server/tagbase_server/utils/io_utils.py @@ -46,7 +46,6 @@ def process_get_input_data(file): f.write(chunk) data_file = filename - logger.info(data_file) return data_file @@ -68,7 +67,6 @@ def process_post_input_data(filename, body): with open(filepath, mode="wb") as f: f.write(data) f.close() - logger.info(filepath) return filepath diff --git a/tagbase_server/tagbase_server/utils/processing_utils.py b/tagbase_server/tagbase_server/utils/processing_utils.py index c133438..9588bdd 100644 --- a/tagbase_server/tagbase_server/utils/processing_utils.py +++ b/tagbase_server/tagbase_server/utils/processing_utils.py @@ -3,6 +3,7 @@ from datetime import datetime as dt from io import StringIO import time +import uuid import pandas as pd import psycopg2.extras @@ -11,7 +12,7 @@ from slack_sdk.errors import SlackApiError from tzlocal import get_localzone -from tagbase_server.utils.db_utils import connect +from tagbase_server.utils.db_utils import connect, create_event, update_event logger = logging.getLogger(__name__) slack_token = os.environ.get("SLACK_BOT_TOKEN", "") @@ -22,6 +23,10 @@ def process_global_attributes( line, cur, submission_id, metadata, submission_filename, line_counter ): + event_id = uuid.uuid4() + global_start = time.perf_counter() + 
create_event(event_category="metadata", event_id=event_id, event_name="populating metadata for new tag submission", + event_status="running", time_start=start) logger.debug("Processing global attribute: %s", line) tokens = line.strip()[1:].split(" = ") logger.debug("Processing token: %s", tokens) @@ -47,10 +52,19 @@ def process_global_attributes( str_submission_id = str(submission_id) str_row = str(rows[0][0]) metadata.append((str_submission_id, str_row, tokens[1])) + global_finish = time.perf_counter() + global_elapsed = round(finish - start, 2) + submission_id = cur.fetchone()[0] + update_event(duration=global_elapsed, event_id=event_id, event_status="finished", submission_id=submission_id, + tag_id=submission_id, time_end=global_finish) def process_etuff_file(file, version=None, notes=None): + logger.info("Started processing: %s", file) + event_id = uuid.uuid4() start = time.perf_counter() + create_event(event_category="submission", event_id=event_id, event_name="new tag submission", + event_status="running", time_start=start) submission_filename = file # full path name is now preferred rather than - file[file.rindex("/") + 1 :] logger.info( "Processing etuff file: %s", @@ -76,13 +90,18 @@ def process_etuff_file(file, version=None, notes=None): "Successful INSERT of '%s' into 'submission' table.", submission_filename, ) - + sub_finish = time.perf_counter() + sub_elapsed = round(finish - start, 2) cur.execute("SELECT currval('submission_submission_id_seq')") submission_id = cur.fetchone()[0] + update_event(duration=sub_elapsed, event_id=event_id, event_status="finished", submission_id=submission_id, + tag_id=submission_id, time_end=sub_finish) metadata = [] proc_obs = [] s_time = time.perf_counter() + create_event(event_category="submission", event_id=event_id, event_name="new tag submission", + event_status="running", time_start=start) with open(file, "rb") as data: lines = [line.decode("utf-8", "ignore") for line in data.readlines()] variable_lookup = {} From 
09a44fccfe0f6b820777597be048b3d62bcbf9fc Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Thu, 26 Jan 2023 03:14:43 -0800 Subject: [PATCH 06/12] ISSUE-174 Create events_log table --- .../tagbase_server/utils/db_utils.py | 41 +++++++++++----- .../tagbase_server/utils/processing_utils.py | 47 +++++++++++++++---- 2 files changed, 66 insertions(+), 22 deletions(-) diff --git a/tagbase_server/tagbase_server/utils/db_utils.py b/tagbase_server/tagbase_server/utils/db_utils.py index ab2e1fa..304e569 100644 --- a/tagbase_server/tagbase_server/utils/db_utils.py +++ b/tagbase_server/tagbase_server/utils/db_utils.py @@ -39,7 +39,13 @@ def connect(): return conn -def create_event(event_category=None, event_id=None, event_name=None, event_status=None, time_start=None): +def create_event( + event_category=None, + event_id=None, + event_name=None, + event_status=None, + time_start=None, +): """ Create a new event in the events_log table. Note the event_id UUID is not automatically generated. It must be passed to this function call. @@ -51,13 +57,7 @@ def create_event(event_category=None, event_id=None, event_name=None, event_stat event_cur.execute( "INSERT INTO events_log (event_id, event_category, event_name, time_start, event_status) " "VALUES %s, %s, %s, %s, %s)", - ( - event_id, - event_category, - event_name, - time_start, - event_status - ), + (event_id, event_category, event_name, time_start, event_status), ) logger.info( "Successfully created new event: '%s'", @@ -68,11 +68,21 @@ def create_event(event_category=None, event_id=None, event_name=None, event_stat event_conn.close() -def update_event(duration=None, event_id=None, event_status=None, submission_id=None, tag_id=None, time_end=None): +def update_event( + duration=None, + event_id=None, + event_status=None, + submission_id=None, + tag_id=None, + time_end=None, +): """ Update existing event in the events_log table with new data. 
""" - logger.debug("Updating event: '%s' in events log...", event_id,) + logger.debug( + "Updating event: '%s' in events log...", + event_id, + ) conn = connect() with conn: with conn.cursor() as cur: @@ -80,7 +90,15 @@ def update_event(duration=None, event_id=None, event_status=None, submission_id= "UPDATE events_log " "SET submission_id = %s, tag_id = %s, event_id = %s, time_end = %s, duration = %s, event_status = %s" " WHERE event_id = %s", - (submission_id, tag_id, event_id, time_end, duration, event_status, event_id), + ( + submission_id, + tag_id, + event_id, + time_end, + duration, + event_status, + event_id, + ), ) logger.info( "Successfully updated event: '%s'", @@ -89,4 +107,3 @@ def update_event(duration=None, event_id=None, event_status=None, submission_id= conn.commit() cur.close() conn.close() - diff --git a/tagbase_server/tagbase_server/utils/processing_utils.py b/tagbase_server/tagbase_server/utils/processing_utils.py index 9588bdd..b9329d8 100644 --- a/tagbase_server/tagbase_server/utils/processing_utils.py +++ b/tagbase_server/tagbase_server/utils/processing_utils.py @@ -25,8 +25,13 @@ def process_global_attributes( ): event_id = uuid.uuid4() global_start = time.perf_counter() - create_event(event_category="metadata", event_id=event_id, event_name="populating metadata for new tag submission", - event_status="running", time_start=start) + create_event( + event_category="metadata", + event_id=event_id, + event_name="populating metadata for new tag submission", + event_status="running", + time_start=start, + ) logger.debug("Processing global attribute: %s", line) tokens = line.strip()[1:].split(" = ") logger.debug("Processing token: %s", tokens) @@ -55,16 +60,27 @@ def process_global_attributes( global_finish = time.perf_counter() global_elapsed = round(finish - start, 2) submission_id = cur.fetchone()[0] - update_event(duration=global_elapsed, event_id=event_id, event_status="finished", submission_id=submission_id, - tag_id=submission_id, 
time_end=global_finish) + update_event( + duration=global_elapsed, + event_id=event_id, + event_status="finished", + submission_id=submission_id, + tag_id=submission_id, + time_end=global_finish, + ) def process_etuff_file(file, version=None, notes=None): logger.info("Started processing: %s", file) event_id = uuid.uuid4() start = time.perf_counter() - create_event(event_category="submission", event_id=event_id, event_name="new tag submission", - event_status="running", time_start=start) + create_event( + event_category="submission", + event_id=event_id, + event_name="new tag submission", + event_status="running", + time_start=start, + ) submission_filename = file # full path name is now preferred rather than - file[file.rindex("/") + 1 :] logger.info( "Processing etuff file: %s", @@ -94,14 +110,25 @@ def process_etuff_file(file, version=None, notes=None): sub_elapsed = round(finish - start, 2) cur.execute("SELECT currval('submission_submission_id_seq')") submission_id = cur.fetchone()[0] - update_event(duration=sub_elapsed, event_id=event_id, event_status="finished", submission_id=submission_id, - tag_id=submission_id, time_end=sub_finish) + update_event( + duration=sub_elapsed, + event_id=event_id, + event_status="finished", + submission_id=submission_id, + tag_id=submission_id, + time_end=sub_finish, + ) metadata = [] proc_obs = [] s_time = time.perf_counter() - create_event(event_category="submission", event_id=event_id, event_name="new tag submission", - event_status="running", time_start=start) + create_event( + event_category="submission", + event_id=event_id, + event_name="new tag submission", + event_status="running", + time_start=start, + ) with open(file, "rb") as data: lines = [line.decode("utf-8", "ignore") for line in data.readlines()] variable_lookup = {} From 3fdfdedf9e6b89d44249070eea2013f4184edac1 Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Mon, 6 Feb 2023 21:23:19 -0800 Subject: [PATCH 07/12] ISSUE-174 Create events_log table --- 
docker-compose.yml | 50 ++++++------- services/zeromq/Dockerfile | 14 ++++ services/zeromq/db_utils.py | 0 services/zeromq/zeromq_server.py | 41 +++++++++++ .../tagbase_server/utils/db_utils.py | 70 ------------------- 5 files changed, 80 insertions(+), 95 deletions(-) create mode 100644 services/zeromq/Dockerfile create mode 100644 services/zeromq/db_utils.py create mode 100644 services/zeromq/zeromq_server.py diff --git a/docker-compose.yml b/docker-compose.yml index 37fe9be..77da7f9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -82,31 +82,6 @@ services: networks: - internal-network restart: unless-stopped - # postgres: - # build: - # context: ./services/postgres - # environment: - # - PGDATA=/var/lib/postgresql/data/pgdata - # - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - # expose: - # - ${POSTGRES_PORT} - # healthcheck: - # test: ["CMD-SHELL", "pg_isready -d tagbase -h postgres -U tagbase"] - # interval: 15s - # timeout: 5s - # retries: 5 - # start_period: 15s - # hostname: postgres - # labels: - # "docker_compose_diagram.cluster": "Internal Network" - # "docker_compose_diagram.description": "Tagbase PostgreSQL server" - # networks: - # - internal-network - # ports: - # - ${POSTGRES_PORT}:${POSTGRES_PORT} - # restart: unless-stopped - # volumes: - # - ./postgres-data:/var/lib/postgresql/data postgis: environment: - ALLOW_IP_RANGE=0.0.0.0/0 @@ -174,5 +149,30 @@ services: restart: unless-stopped volumes: - ./logs:/usr/src/app/logs + zeromq: + build: + context: ./services/zeromq + depends_on: + postgis: + condition: service_healthy + tagbase_server: + condition: service_healthy + environment: + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_PORT=${POSTGRES_PORT} + expose: + - 5555 + hostname: zeromq + labels: + "docker_compose_diagram.cluster": "Internal Network" + "docker_compose_diagram.description": "zeromq messaging service" + "docker_compose_diagram.icon": "zeromq" + links: + - postgis + networks: + - internal-network + restart: 
unless-stopped + # volumes: + # - ./logs:/usr/src/app/logs networks: internal-network: \ No newline at end of file diff --git a/services/zeromq/Dockerfile b/services/zeromq/Dockerfile new file mode 100644 index 0000000..2ea7261 --- /dev/null +++ b/services/zeromq/Dockerfile @@ -0,0 +1,14 @@ +FROM ubuntu:22.04 + +RUN apt-get update +RUN apt-get install -y --force-yes python python-dev python-setuptools software-properties-common gcc python-pip +RUN apt-get clean all + +RUN pip install pyzmq + +ADD zeromq_server.py /tmp/zeromq_server.py + +# Zmq Sub Server +EXPOSE 5555 + +CMD ["python","/tmp/zmqserver.py"] diff --git a/services/zeromq/db_utils.py b/services/zeromq/db_utils.py new file mode 100644 index 0000000..e69de29 diff --git a/services/zeromq/zeromq_server.py b/services/zeromq/zeromq_server.py new file mode 100644 index 0000000..4712160 --- /dev/null +++ b/services/zeromq/zeromq_server.py @@ -0,0 +1,41 @@ +import signal +import zmq + +logging.basicConfig(filename='subscriber.log', level=logging.INFO) + +signal.signal(signal.SIGINT, signal.SIG_DFL) + +context = zmq.Context() + +socket = context.socket(zmq.SUB) +socket.connect('tcp://localhost:5555') +socket.setsockopt(zmq.SUBSCRIBE, b'event_log') + + +def process_topic(topic=None): + import db_utils.py + if topic is 'event_log/create': + db_utils.create_event( + event_category=msg_parts[0], + event_id=msg_parts[1], + event_name=msg_parts[2], + event_status=msg_parts[3], + time_start=msg_parts[4], + ) + else: + db_utils.update_event( + duration=msg_parts[0], + event_id=msg_parts[1], + event_status=msg_parts[2], + submission_id=msg_parts[3], + tag_id=msg_parts[4], + time_end=msg_parts[5], + ) + +while True: + message = socket.recv() + topic, messagedata = string.split() + msg_parts = messagedata.split(' ') + logging.info('topic: {} message: {} - {}'.format(topic, messagedata, time.strftime("%Y-%m-%d %H:%M"))) + process_topic(topic) + \ No newline at end of file diff --git 
a/tagbase_server/tagbase_server/utils/db_utils.py b/tagbase_server/tagbase_server/utils/db_utils.py index 304e569..8dfe207 100644 --- a/tagbase_server/tagbase_server/utils/db_utils.py +++ b/tagbase_server/tagbase_server/utils/db_utils.py @@ -37,73 +37,3 @@ def connect(): ) logger.info("Successfully connected to TagbaseDB.") return conn - - -def create_event( - event_category=None, - event_id=None, - event_name=None, - event_status=None, - time_start=None, -): - """ - Create a new event in the events_log table. Note the event_id UUID is not automatically generated. - It must be passed to this function call. - """ - logger.info("Creating new event: %s in events log...", event_id) - event_conn = connect() - with event_conn: - with event_conn.cursor() as event_cur: - event_cur.execute( - "INSERT INTO events_log (event_id, event_category, event_name, time_start, event_status) " - "VALUES %s, %s, %s, %s, %s)", - (event_id, event_category, event_name, time_start, event_status), - ) - logger.info( - "Successfully created new event: '%s'", - event_id, - ) - event_conn.commit() - event_cur.close() - event_conn.close() - - -def update_event( - duration=None, - event_id=None, - event_status=None, - submission_id=None, - tag_id=None, - time_end=None, -): - """ - Update existing event in the events_log table with new data. 
- """ - logger.debug( - "Updating event: '%s' in events log...", - event_id, - ) - conn = connect() - with conn: - with conn.cursor() as cur: - cur.execute( - "UPDATE events_log " - "SET submission_id = %s, tag_id = %s, event_id = %s, time_end = %s, duration = %s, event_status = %s" - " WHERE event_id = %s", - ( - submission_id, - tag_id, - event_id, - time_end, - duration, - event_status, - event_id, - ), - ) - logger.info( - "Successfully updated event: '%s'", - event_id, - ) - conn.commit() - cur.close() - conn.close() From 3cd32188f09a28d3148898d65ef79e7719b3eca0 Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Mon, 6 Feb 2023 21:23:40 -0800 Subject: [PATCH 08/12] ISSUE-174 Create events_log table --- services/zeromq/zeromq_server.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/services/zeromq/zeromq_server.py b/services/zeromq/zeromq_server.py index 4712160..6a3dbcc 100644 --- a/services/zeromq/zeromq_server.py +++ b/services/zeromq/zeromq_server.py @@ -1,20 +1,21 @@ import signal import zmq -logging.basicConfig(filename='subscriber.log', level=logging.INFO) +logging.basicConfig(filename="subscriber.log", level=logging.INFO) signal.signal(signal.SIGINT, signal.SIG_DFL) context = zmq.Context() socket = context.socket(zmq.SUB) -socket.connect('tcp://localhost:5555') -socket.setsockopt(zmq.SUBSCRIBE, b'event_log') +socket.connect("tcp://localhost:5555") +socket.setsockopt(zmq.SUBSCRIBE, b"event_log") def process_topic(topic=None): import db_utils.py - if topic is 'event_log/create': + + if topic is "event_log/create": db_utils.create_event( event_category=msg_parts[0], event_id=msg_parts[1], @@ -32,10 +33,14 @@ def process_topic(topic=None): time_end=msg_parts[5], ) + while True: message = socket.recv() topic, messagedata = string.split() - msg_parts = messagedata.split(' ') - logging.info('topic: {} message: {} - {}'.format(topic, messagedata, time.strftime("%Y-%m-%d %H:%M"))) + msg_parts = messagedata.split(" 
") + logging.info( + "topic: {} message: {} - {}".format( + topic, messagedata, time.strftime("%Y-%m-%d %H:%M") + ) + ) process_topic(topic) - \ No newline at end of file From 8b9a7cd0aafea5c8fb0a53357ff0b26c906482d7 Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Sun, 12 Feb 2023 13:36:31 -0800 Subject: [PATCH 09/12] ISSUE-174 Create events_log table --- docker-compose.yml | 75 +++++++----- openapi.yaml | 21 ++-- rabbitmq/data/.bash_history | 10 ++ rabbitmq/data/.erlang.cookie | 1 + .../data/mnesia/rabbit@rabbitmq-feature_flags | 6 + .../rabbit@rabbitmq/cluster_nodes.config | 1 + .../coordination/rabbit@rabbitmq/00000024.wal | 1 + .../coordination/rabbit@rabbitmq/meta.dets | Bin 0 -> 5464 bytes .../coordination/rabbit@rabbitmq/names.dets | Bin 0 -> 5464 bytes .../vhosts/628WB79CIFDYO9LJI6DKMI09L/.config | 2 + .../vhosts/628WB79CIFDYO9LJI6DKMI09L/.vhost | 1 + .../msg_store_persistent/0.rdq | 0 .../msg_store_persistent/clean.dot | 2 + .../msg_store_persistent/file_summary.ets | Bin 0 -> 607 bytes .../msg_store_persistent/msg_store_index.ets | Bin 0 -> 533 bytes .../msg_store_transient/0.rdq | 0 .../msg_store_transient/clean.dot | 2 + .../msg_store_transient/file_summary.ets | Bin 0 -> 607 bytes .../msg_store_transient/msg_store_index.ets | Bin 0 -> 533 bytes .../628WB79CIFDYO9LJI6DKMI09L/recovery.dets | Bin 0 -> 6000 bytes .../rabbit@rabbitmq/nodes_running_at_shutdown | 1 + .../quorum/rabbit@rabbitmq/00000024.wal | 1 + .../quorum/rabbit@rabbitmq/meta.dets | Bin 0 -> 5464 bytes .../quorum/rabbit@rabbitmq/names.dets | Bin 0 -> 5464 bytes .../rabbit_durable_exchange.DCD | Bin 0 -> 1327 bytes .../rabbit@rabbitmq/rabbit_durable_queue.DCD | 1 + .../rabbit@rabbitmq/rabbit_durable_route.DCD | 1 + .../rabbit_runtime_parameters.DCD | Bin 0 -> 193 bytes .../data/mnesia/rabbit@rabbitmq/rabbit_serial | 1 + .../rabbit_topic_permission.DCD | 1 + .../mnesia/rabbit@rabbitmq/rabbit_user.DCD | Bin 0 -> 230 bytes .../rabbit_user_permission.DCD | Bin 0 -> 188 bytes 
.../mnesia/rabbit@rabbitmq/rabbit_vhost.DCD | Bin 0 -> 170 bytes .../data/mnesia/rabbit@rabbitmq/schema.DAT | Bin 0 -> 27294 bytes .../mnesia/rabbit@rabbitmq/schema_version | 1 + services/postgis/tagbase_schema.sql | 2 +- services/subscriber/Dockerfile | 18 +++ services/subscriber/db_utils.py | 107 ++++++++++++++++++ services/subscriber/rabbitmq_subscriber.py | 76 +++++++++++++ services/subscriber/requirements.txt | 2 + services/zeromq/Dockerfile | 14 --- services/zeromq/zeromq_server.py | 46 -------- tagbase_server/Dockerfile | 7 +- tagbase_server/pyproject.toml | 1 + tagbase_server/requirements.txt | 1 + tagbase_server/setup.py | 1 + tagbase_server/tagbase_server/__main__.py | 12 +- .../controllers/events_controller.py | 13 ++- .../tagbase_server/utils/db_utils.py | 4 +- .../tagbase_server/utils/processing_utils.py | 67 +++++------ .../tagbase_server/utils/rabbitmq_utils.py | 14 +++ 51 files changed, 366 insertions(+), 148 deletions(-) create mode 100644 rabbitmq/data/.bash_history create mode 100644 rabbitmq/data/.erlang.cookie create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/meta.dets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/names.dets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/.config create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/.vhost rename services/zeromq/db_utils.py => rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/0.rdq (100%) create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/clean.dot create mode 
100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/file_summary.ets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/msg_store_index.ets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/0.rdq create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/clean.dot create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/file_summary.ets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/msg_store_index.ets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/recovery.dets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/meta.dets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/names.dets create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_exchange.DCD create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_runtime_parameters.DCD create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user.DCD create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user_permission.DCD create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_vhost.DCD create mode 100644 
rabbitmq/data/mnesia/rabbit@rabbitmq/schema.DAT create mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version create mode 100644 services/subscriber/Dockerfile create mode 100644 services/subscriber/db_utils.py create mode 100644 services/subscriber/rabbitmq_subscriber.py create mode 100644 services/subscriber/requirements.txt delete mode 100644 services/zeromq/Dockerfile delete mode 100644 services/zeromq/zeromq_server.py create mode 100644 tagbase_server/tagbase_server/utils/rabbitmq_utils.py diff --git a/docker-compose.yml b/docker-compose.yml index 77da7f9..c5e40e7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -113,6 +113,53 @@ services: - ./dbbackups:/backups - ./postgis-data:/var/lib/postgresql - ./services/postgis/tagbase_schema.sql:/docker-entrypoint-initdb.d/tagbase_schema.sql + rabbitmq: + depends_on: + postgis: + condition: service_healthy + healthcheck: + test: "set -eo pipefail rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }, { [], no_alarms } = { rabbit:alarms(), no_alarms }, [] /= rabbit_networking:active_listeners(), rabbitmq_node_is_healthy.' 
|| exit 1" + interval: 15s + timeout: 5s + retries: 5 + start_period: 15s + hostname: rabbitmq + image: rabbitmq:3-management-alpine + labels: + "docker_compose_diagram.cluster": "Internal Network" + "docker_compose_diagram.description": "rabbitmq messaging service" + "docker_compose_diagram.icon": "rabbitmq" + networks: + - internal-network + ports: + - 5672:5672 + - 15672:15672 + restart: unless-stopped + volumes: + - ./rabbitmq/data/:/var/lib/rabbitmq/ + - ./logs/rabbitmq/:/var/log/rabbitmq/ + rabbitmq_subscriber: + build: + context: ./services/subscriber + depends_on: + rabbitmq: + condition: service_healthy + environment: + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_PORT=${POSTGRES_PORT} + hostname: rabbitmq_subscriber + labels: + "docker_compose_diagram.cluster": "Internal Network" + "docker_compose_diagram.description": "rabbitmq subscriber service" + "docker_compose_diagram.icon": "rabbitmq" + links: + - postgis + - rabbitmq + networks: + - internal-network + restart: unless-stopped + volumes: + - ./logs/rabbitmq_subscriber:/usr/src/app/logs/rabbitmq_subscriber slack_docker: environment: - webhook=${webhook} @@ -144,35 +191,11 @@ services: "docker_compose_diagram.icon": "flask" links: - postgis + - rabbitmq networks: - internal-network restart: unless-stopped volumes: - - ./logs:/usr/src/app/logs - zeromq: - build: - context: ./services/zeromq - depends_on: - postgis: - condition: service_healthy - tagbase_server: - condition: service_healthy - environment: - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - - POSTGRES_PORT=${POSTGRES_PORT} - expose: - - 5555 - hostname: zeromq - labels: - "docker_compose_diagram.cluster": "Internal Network" - "docker_compose_diagram.description": "zeromq messaging service" - "docker_compose_diagram.icon": "zeromq" - links: - - postgis - networks: - - internal-network - restart: unless-stopped - # volumes: - # - ./logs:/usr/src/app/logs + - ./logs/tagbase_server:/usr/src/app/logs/tagbase_server networks: 
internal-network: \ No newline at end of file diff --git a/openapi.yaml b/openapi.yaml index cfd7a62..427e412 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -353,7 +353,7 @@ components: event200: description: Information for a given event example: - event_id: 1 + event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" tag_id: 1 submission_id: 1 event_category: "submission" @@ -426,22 +426,23 @@ components: title: eventPut200 type: object event_id: - description: Unique numeric event ID associated with the ingested tag data file - example: 1 + description: UUID associated with a particular event + example: "06335e84-2872-4914-8c5d-3ed07d2a2f16" + format: uuid title: event_id - type: integer + type: string events200: description: Response detailing all available events example: count: 3 events: - - event_id: 1 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2f16" tag_id: 1 submission_id: 1 - - event_id: 2 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" tag_id: 1 submission_id: 1 - - event_id: 3 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2fkf" tag_id: 1 submission_id: 1 properties: @@ -450,12 +451,12 @@ components: example: 3 type: integer events: - description: List of unique numeric event IDs and associated tag and submission IDs + description: List of event UUID's and associated tag and submission IDs type: array items: - description: Unique numeric event ID and associated tag and submission IDs + description: UUID and associated tag and submission ID example: - event_id: 1 + event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" tag_id: 1 submission_id: 1 properties: diff --git a/rabbitmq/data/.bash_history b/rabbitmq/data/.bash_history new file mode 100644 index 0000000..4950a33 --- /dev/null +++ b/rabbitmq/data/.bash_history @@ -0,0 +1,10 @@ +set -eo pipefail +rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running } +rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), 
rabbit_app_booted_and_running }' +rabbitmqctl +rabbitmqctl eval { true, rabbit_app_booted_and_running } +rabbitmqctl eval '{ true, rabbit_app_booted_and_running }' +rabbitmqctl eval +rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }, { [], no_alarms } = { rabbit:alarms(), no_alarms }, [] /= rabbit_networking:active_listeners(), rabbitmq_node_is_healthy.' +rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }, { [], no_alarms } = { rabbit:alarms(), no_alarms }, [] /= rabbit_networking:active_listeners(), rabbitmq_node_is_healthy.' +exit diff --git a/rabbitmq/data/.erlang.cookie b/rabbitmq/data/.erlang.cookie new file mode 100644 index 0000000..d3f0f93 --- /dev/null +++ b/rabbitmq/data/.erlang.cookie @@ -0,0 +1 @@ +YWTZUIPTXZJYSVHEEOJU \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags b/rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags new file mode 100644 index 0000000..38c8ca2 --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags @@ -0,0 +1,6 @@ +[classic_mirrored_queue_version,classic_queue_type_delivery_support, + direct_exchange_routing_v2,drop_unroutable_metric,empty_basic_get_metric, + feature_flags_v2,implicit_default_bindings,listener_records_in_ets, + maintenance_mode_status,quorum_queue,stream_queue, + stream_single_active_consumer,tracking_records_in_ets,user_limits, + virtual_host_metadata]. diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config b/rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config new file mode 100644 index 0000000..178bfa9 --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config @@ -0,0 +1 @@ +{[rabbit@rabbitmq],[rabbit@rabbitmq]}. 
diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal b/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal new file mode 100644 index 0000000..698b19c --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal @@ -0,0 +1 @@ +RAWA \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/meta.dets b/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/meta.dets new file mode 100644 index 0000000000000000000000000000000000000000..c1ae47e3b05f8128913ddbd12be673bebf7cc413 GIT binary patch literal 5464 zcmZQz5Vc@fefB*A0|O%zb0U~vF`zIAFfuT~7!aDSiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!KiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!K4|_EY)hk28!*W$TA%+y2U)o@4HWyHfLLJ zELndot|IqHfdm^+l==k}m#($}&?`2|Md=|@_7OxxtgI2i%BBw=^akvDHv{+|bUqdSlD0bY%5qcX=1Ny)=3mwf}%fisAJ5cO_0ASFb63JPi(N^THD3pF8)<-pU z&aad9RD%K+Q8eZSG*^D*B4E&LxDZW%BsoG5iE(O$2u^joehUKskd1DyR$OoA<=m69i6wFHB-Iq!0`$_Wdao%J1Z8y}j#TNEJ)9nzMmoJ1Fv8htqyBck=uClcmd- zjc1m=f6dP$_dtOJ8&FjG5fqoMu>sI4HY`NtAyRe`L`1A?5W&iJ9o$=@4)7m3md?}& z-h*0i8W}JSTk#s@hi+*BXMgQdsy9Yov>l1r0yNTMo$Y*ORv1w1u#d-$zu`VO1YYP0 zxNnRR*mjMcvWcYITBoSz_hce8>Ew72c}0v>{4BIJ6($Z9AdSdFi1g7dw6*ghjfZiK zE`SW$SwiY2ue6NysS%|^eoqr6sJAdR+OE}D$AgCemYi{I3s+I-M6qp`m!#RhNlCSn z{*5;YT8}80V2W)u)L&>DC%xK7q>m!vD33dZs2|N8(Usg4edO-a=w;ab10pHQAGFHC ABme*a literal 0 HcmV?d00001 diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/msg_store_index.ets b/rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/msg_store_index.ets new file mode 100644 index 0000000000000000000000000000000000000000..3af7cad30af1560175657d81a9ac72084d616819 GIT binary patch literal 533 zcmZ`$J5B>J5cO_0ASFb63JPi(N^St6g_@AY@@GQ4tUZhEkSr|{6_=slCY*o@Zo$}F 
z5P@)G`+df5JViOE)bwi1Sn)2eFVCMA2a8g((&!l*XtwKxlqR0_o4r?GHy^BAy=*_T z^6hhWk+g>z6u9U`V;(_sg-aK~2hE0a(F916Jr5!=PA$C$r@BpYf8|Yt|Jad2+j%Z>B~QX?0mjqj(B){A6BaZ(>O15%-0~371BV#6 zaT?ay6{t7C^3kCt!|$I-)Hv7TQOJfAKkZksYY7+xh)K3}2VILnl<6{RJw{N`pd+ST z%EqXKPpqiX^9TA#qD0YS#%*9Z&IHyGD5j>cb8M*7LbDx|HP!5AQ4@BOXPekm`j`wH Q=Y7y@N!5pWa)QMV=pu(iOt7?c-GD^l)!KySo`r zr{gDEsdPMcw3Db9ITJ-mfkR`L*G$#5vPmwLHT0auZgV9i4>RKLL=;vj-Laam;;pip zR?>81bFRh;3Y>LSVTF>)6mz4@EnnQ!Royy|51%)6rsYvK?jny_{a&Nb8#H?uJ)Q(Y zJiCA}&u$>Xvjd#3c&)Y>_aEKg{MP7`Gyp*)qZTVhV2c*OgkMxi6 literal 0 HcmV?d00001 diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown b/rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown new file mode 100644 index 0000000..2843977 --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown @@ -0,0 +1 @@ +[rabbit@rabbitmq]. diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal b/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal new file mode 100644 index 0000000..698b19c --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal @@ -0,0 +1 @@ +RAWA \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/meta.dets b/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/meta.dets new file mode 100644 index 0000000000000000000000000000000000000000..c1ae47e3b05f8128913ddbd12be673bebf7cc413 GIT binary patch literal 5464 zcmZQz5Vc@fefB*A0|O%zb0U~vF`zIAFfuT~7!aDSiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!KiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!K+X1b}{rZ!E%yC=bS@F?QJhw*JZnY4Bh zM3Aubl$lA${PNF#MteahZmy;Pu;kav%Lf@FO!9KQlEjcS!afRI*}?{bMRKx%_&hmX zBxeYRnsJ_)i*6|@8E}C3i9_)C!6Ax1?VYdPoil_3x)+kEHObH>n(DIAf(mD!r%vUq9fmfBmy_Gk!%UNKqt^$+IQiK}V;C0m3l8~{mH+?% literal 0 HcmV?d00001 diff --git 
a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD new file mode 100644 index 0000000..f8dd237 --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD @@ -0,0 +1 @@ +cXM \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD new file mode 100644 index 0000000..f8dd237 --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD @@ -0,0 +1 @@ +cXM \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_runtime_parameters.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_runtime_parameters.DCD new file mode 100644 index 0000000000000000000000000000000000000000..a8c88b9a7679106f6961c370667f6dc3859d7b7c GIT binary patch literal 193 zcmXwxu?oU46h)t=R?tDve{c|5tafp;n@Am;)Imbh7n<0nYT8nqodo~RA2Lv#?%}{a zoi1^b<8BB5F+ar7Q@RL){F+iJ7z%^X#{{Vr8IZuAXW)kZBJh_8Bf~gX)w&&hmxKej zZw}=3;m~uYws~%w1YvCIqEcGW8#7FcN|+L1q6!NI%W0C=rA4Yx+uQ8l^nTt`=#?U( S=}D#Ge)VX!>=MP*MmAq}!aF|z literal 0 HcmV?d00001 diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial new file mode 100644 index 0000000..df11948 --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial @@ -0,0 +1 @@ +24. 
diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD new file mode 100644 index 0000000..f8dd237 --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD @@ -0,0 +1 @@ +cXM \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user.DCD new file mode 100644 index 0000000000000000000000000000000000000000..86a9345e0561faea4fe43ef8068e8261b31b0dbe GIT binary patch literal 230 zcmZQ%VrEH>@a1M;VDL!__i=2_U`t`(%E?cU&qz&7Ni9lYU{6U-i3bW~GcX(K8DulC zndliA>KUgn@E0W}C1sX4K&afp4CW*T2DY9g2IkJoNeryh>wso9!Oi5&%qvMP%1g|N zFD*_j$_2`?rkAD`mw;F*lCmiQn^_8_w!dYXRqyfV<8I&D!~*LjM=v+Tzd9Q*=YvS1 zRzwa^A!7;yZ(>SrW?p7-Nl{`+eo@a1M;VDL!__i=2_U`t`(%E?cU&qz&7Ni9lYU{6U-i3bW~GcX(K8DulC zndliA>KUgn@E0W}C1sX4K&afp4CW*T2DY9g2IkJoNery>U4UjrK+I$YsxB=~Es8Hl vEy~R-F3!x)%K(aTfyK%)@{3Dyfr?qvOH&~%M*R#Hpfo}?NSaAc3zY%@)!Q-$ literal 0 HcmV?d00001 diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_vhost.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_vhost.DCD new file mode 100644 index 0000000000000000000000000000000000000000..186fdeb93cc7cd1bfeaea929ec492c05b396a291 GIT binary patch literal 170 zcmZQ%VrEH>@a1M;VDL!__i=2_U`t`(%E?cU&qz&7Ni9lYU{6U-i3bW~GcX(K8DulC zndliA>KUgn@E0W}C1sX4K&afp4CW*T2DY9g2IkJoNerwPLV#wvLCj=HVPGxG$S*F* z1&T51XO#dcrW6M5l+@znqRfJl%=|o%jEGBWT4HHVi9%UsQAufHjsi$a3Ij_?VtR2F E0PZ?3d;kCd literal 0 HcmV?d00001 diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/schema.DAT b/rabbitmq/data/mnesia/rabbit@rabbitmq/schema.DAT new file mode 100644 index 0000000000000000000000000000000000000000..9ce3422562b9da84ee10d63973fb7c02e7ed2f80 GIT binary patch literal 27294 zcmeHPON<;x8LsYm?Ci|0XERyv+5|sxa3aJ*Ai~3kDCVUohX9dC2q8;T(_NYAbw9@a zSli-Y0tY}s0tvXn0WRP(QZC3yLE^*(i3oySb=A15r+c#Nnckh9 
zt&v7E_2}yEzy9xk)nA2>{SvzTmv<0C8vian_=|L?TT8wgEAT$>ll!SdZ47q=r!xP!!mA6j)aH-*+Dx9-4jP;^7|$o`0pUTd3+`mkg^}O>}VpHIJS?C)rY>^q<`kER30K6=1*+CQon^7_W zJRIA#qtHSPmqggY5#Axgbi&97J|g28>C30zdzmxR{X3H}lD?MAzkW!b{}!Kr%Iewa zPq)qe>@V9OKLQV?n35ug3u-Xiv?-A$%L5#S=c#n*5?NCsJ>u^0#hGQctFKcAb)M+u7LcNyOSw}Nf=69yRRrvr2p4B{v)vH%`pO?sNdX6 zfR!v|@oe9=mlh6M@<@pybe(tVKzX;psI#|M%qi zKg#hR64y4y3W4IDiZf^=Z#N{un?L-UkOmCb`VTLQ*cpS*Y2+6GGvH z5(;I*Y2S@sh=11(0zaU>Ts$z$zU^4yfS4or&+m3%@V{34{-a``!a{$`$t83#a9~>? z9EQX#1^=CwSHH!bM&sQnIgR^{%hdxHQ9xhk`oAsff8o`q^c&YzSrJg&^T0@f&M1{~ z3;{3=mY5;18@|AkPkiQ^0`~t=urFf&17EKbj{glA|6*NFg#O3{vyZ*rhyzg6FNOVC z$Pk=q z$XBKr#?kAE{@4B|&%f~M1Bg@GF6>9Zu)hav)`0DdNpA)z?m3=A7@5+kALERK*~|#( zj8Z3G5G0Ym5>tZSIQ26j2(G@T4D=B-rz#34pkL?y|64Nt#rnVS|IfuF9&!G2JIy`n z8`wR^$dQBf)uGZaa`~y{;KZT5MGXJEM*`XNudpS7X6RsOhyUQ%x5JB`X{2j9bpIlx zWw69d)-XO!umHL|8B2;7)WQhUAjF|0o?!4ThZcJ5!ObA^o8>baVLy&6cwm~?nCx{E z0SCIriR}b!(jS8jT8aw1ac5Dez)MQT72tWN)%qds|JP;w3;*7+L)bn$x(6`Zo<@Ba z_ZT!D>edG+_t>0{j)xP_;l^w8CG#tGCHNOA1vMA$UqN_D(|2gJ zoE|MeEGBSverA9&0oZo;T{i*#^oJh^6X3*7On^#qhy+(!!viS#l`cZ{A9qp3yVOO^ zZ{LAa7WA471QFp%E%DL}u54JOpj!$Y^SwDDod`$ilcOaeAeJP;(RP3!)tH8zKyca% zR$u(i+al}jidz0Ycxp|(@ir+?eVzONYW;8gF+pnmZ}b2sD&$4{JAG1-{t;P+cKBv8 zc1%ibHZh#z zMIsj3*XWb1u=BOx=qxi10^-s1lPa8lMK+neET3xitg~kEm5EEr<=m#&RoxT?W`Y9M z<6Qq|_WvB`BAXfME~a^E9`-;KvH0%DBH;UUPmA)|juAX$KwvGOPWe)u#|rC|lrc@+ zV`mhWlp?V7%0C7|Jm1=RL{O~@qn(F`=4V~_NU}PB;Plnc!KtCo!S@AvdguzTUqZ-D zmk%Knr^_n{{UTj{1ZjuUM5ieT literal 0 HcmV?d00001 diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version b/rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version new file mode 100644 index 0000000..203a50c --- /dev/null +++ b/rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version @@ -0,0 +1 @@ 
+[store_msg,persistent_bytes,multiple_routing_keys,exchange_options,queue_options,topic_permission,vhost_limits,user_password_hashing,cluster_name,policy_apply_to,topic_trie_node,mirrored_supervisor,gm,user_admin_to_tags,exchange_event_serial,semi_durable_route,topic_trie,add_opts_to_listener,remove_user_scope,move_messages_to_vhost_store]. diff --git a/services/postgis/tagbase_schema.sql b/services/postgis/tagbase_schema.sql index 949f2b5..b258edd 100644 --- a/services/postgis/tagbase_schema.sql +++ b/services/postgis/tagbase_schema.sql @@ -47,7 +47,7 @@ COMMENT ON COLUMN events_log.submission_id IS 'Unique numeric ID assigned upon s COMMENT ON COLUMN events_log.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; -COMMENT ON COLUMN events_log.event_id IS 'Unique numeric ID associated with the persisted event'; +COMMENT ON COLUMN events_log.event_id IS 'UUID associated with the persisted event'; COMMENT ON COLUMN events_log.event_category IS 'A specific phase within the Tagbase lifecycle e.g. 
submission, ingestion, migration, reporting.'; diff --git a/services/subscriber/Dockerfile b/services/subscriber/Dockerfile new file mode 100644 index 0000000..08a1f30 --- /dev/null +++ b/services/subscriber/Dockerfile @@ -0,0 +1,18 @@ +FROM python:slim-bullseye + +RUN mkdir -p /usr/src/app +WORKDIR /usr/src/app + +COPY requirements.txt /usr/src/app/ + +RUN apt update && \ + apt install -y --force-yes bash curl gcc iputils-ping && \ + apt clean all && \ + python3 -m pip install --upgrade pip && \ + python3 -m pip install -r requirements.txt --no-cache-dir + + +ADD rabbitmq_subscriber.py rabbitmq_subscriber.py +ADD db_utils.py db_utils.py + +CMD ["python3", "rabbitmq_subscriber.py"] diff --git a/services/subscriber/db_utils.py b/services/subscriber/db_utils.py new file mode 100644 index 0000000..fc30f09 --- /dev/null +++ b/services/subscriber/db_utils.py @@ -0,0 +1,107 @@ +import logging +import os +import psycopg2 +import psycopg2.extras + +psycopg2.extras.register_uuid() +logger = logging.getLogger("rabbitmq_subscriber") + + +def connect(): + """ + Make and return a connection to TagbaseDB. This function also improves handling of Operational errors + if they occur. 
+ :rtype: connection + """ + logger.debug("Attempting connection to TagbaseDB...") + try: + conn = psycopg2.connect( + dbname="tagbase", + user="tagbase", + host="postgis", + port=os.getenv("POSTGRES_PORT"), + password=os.getenv("POSTGRES_PASSWORD"), + ) + except psycopg2.OperationalError as poe: + logger.error("Unable to connect to the database") + return { + "code": "500", + "message": "Encountered psycopg2.OperationalError when attempting to establish a connection " + "to the Tagbase database.", + "more_info": "Contact the service administrator - {email}".format( + email=os.getenv("PGADMIN_DEFAULT_EMAIL") + ), + "trace": poe, + } + logger.debug("Successfully connected to TagbaseDB.") + return conn + + +def create_event( + event_category=None, + event_id=None, + event_name=None, + event_status=None, + time_start=None, +): + """ + Create a new event in the events_log table. Note the event_id UUID is not automatically generated. + It must be passed to this function call. + """ + logger.debug("Creating new event: %s in events log...", event_id) + event_conn = connect() + with event_conn: + with event_conn.cursor() as event_cur: + event_cur.execute( + "INSERT INTO events_log (event_id, event_category, event_name, time_start, event_status) " + "VALUES (%s, %s, %s, %s, %s)", + (event_id, event_category, event_name, time_start, event_status), + ) + logger.info( + "CREATED new event: '%s'", + str(event_id), + ) + event_conn.commit() + event_cur.close() + event_conn.close() + + +def update_event( + duration=None, + event_id=None, + event_status=None, + submission_id=None, + tag_id=None, + time_end=None, +): + """ + Update existing event in the events_log table with new data. 
+ """ + logger.debug( + "Updating event: '%s' in events log...", + event_id, + ) + conn = connect() + with conn: + with conn.cursor() as cur: + cur.execute( + "UPDATE events_log " + "SET submission_id = %s, tag_id = %s, event_id = %s, time_end = %s, duration = %s, event_status = %s" + " WHERE event_id = %s", + ( + submission_id, + tag_id, + event_id, + time_end, + duration, + event_status, + event_id, + ), + ) + logger.info( + "UPDATED event: '%s'", + str(event_id), + ) + conn.commit() + cur.close() + conn.close() diff --git a/services/subscriber/rabbitmq_subscriber.py b/services/subscriber/rabbitmq_subscriber.py new file mode 100644 index 0000000..372139c --- /dev/null +++ b/services/subscriber/rabbitmq_subscriber.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 +import logging +import os +import pika +import sys + +from logging.handlers import RotatingFileHandler + +LOGGER_NAME = "rabbitmq_subscriber" + +os.makedirs("./logs/{}".format(LOGGER_NAME), exist_ok=True) +logger = logging.getLogger(LOGGER_NAME) +if logger.hasHandlers(): + logger.handlers = [] +logger.setLevel(logging.INFO) + +formatter = logging.Formatter( + "%(asctime)s - %(filename)s:%(lineno)d - %(levelname)s - %(message)s" +) + +s_handler = logging.StreamHandler() +s_handler.setFormatter(formatter) +logger.addHandler(s_handler) + +rf_handler = RotatingFileHandler( + f"./logs/{LOGGER_NAME}/{LOGGER_NAME}_log.txt", + mode="a", + maxBytes=100000, + backupCount=10, +) +rf_handler.setFormatter(formatter) +logger.addHandler(rf_handler) + + +def process_topic(topic=None, msg_parts=None): + import db_utils + import uuid + + if topic == "event_log/create": + db_utils.create_event( + event_category=msg_parts[0], + event_id=uuid.UUID(msg_parts[1]), + event_name=msg_parts[2], + event_status=msg_parts[3], + time_start=msg_parts[4], + ) + else: + db_utils.update_event( + duration=msg_parts[0], + event_id=uuid.UUID(msg_parts[1]), + event_status=msg_parts[2], + submission_id=msg_parts[3], + tag_id=msg_parts[4], + 
time_end=msg_parts[5], + ) + + +def subscriber(): + connection = pika.BlockingConnection(pika.ConnectionParameters(host="rabbitmq")) + channel = connection.channel() + channel.queue_declare(queue="event_log") + + def callback(ch, method, properties, body): + logger.info("Received: %r" % body) + topic, messagedata = body.decode("utf-8").split(" ", 1) + process_topic(topic, messagedata.split(" ")) + + channel.basic_consume( + queue="event_log", on_message_callback=callback, auto_ack=True + ) + logger.info("Waiting for messages...") + channel.start_consuming() + + +if __name__ == "__main__": + subscriber() diff --git a/services/subscriber/requirements.txt b/services/subscriber/requirements.txt new file mode 100644 index 0000000..a8e6b2e --- /dev/null +++ b/services/subscriber/requirements.txt @@ -0,0 +1,2 @@ +pika==1.3.1 +psycopg2-binary==2.9.5 \ No newline at end of file diff --git a/services/zeromq/Dockerfile b/services/zeromq/Dockerfile deleted file mode 100644 index 2ea7261..0000000 --- a/services/zeromq/Dockerfile +++ /dev/null @@ -1,14 +0,0 @@ -FROM ubuntu:22.04 - -RUN apt-get update -RUN apt-get install -y --force-yes python python-dev python-setuptools software-properties-common gcc python-pip -RUN apt-get clean all - -RUN pip install pyzmq - -ADD zeromq_server.py /tmp/zeromq_server.py - -# Zmq Sub Server -EXPOSE 5555 - -CMD ["python","/tmp/zmqserver.py"] diff --git a/services/zeromq/zeromq_server.py b/services/zeromq/zeromq_server.py deleted file mode 100644 index 6a3dbcc..0000000 --- a/services/zeromq/zeromq_server.py +++ /dev/null @@ -1,46 +0,0 @@ -import signal -import zmq - -logging.basicConfig(filename="subscriber.log", level=logging.INFO) - -signal.signal(signal.SIGINT, signal.SIG_DFL) - -context = zmq.Context() - -socket = context.socket(zmq.SUB) -socket.connect("tcp://localhost:5555") -socket.setsockopt(zmq.SUBSCRIBE, b"event_log") - - -def process_topic(topic=None): - import db_utils.py - - if topic is "event_log/create": - db_utils.create_event( - 
event_category=msg_parts[0], - event_id=msg_parts[1], - event_name=msg_parts[2], - event_status=msg_parts[3], - time_start=msg_parts[4], - ) - else: - db_utils.update_event( - duration=msg_parts[0], - event_id=msg_parts[1], - event_status=msg_parts[2], - submission_id=msg_parts[3], - tag_id=msg_parts[4], - time_end=msg_parts[5], - ) - - -while True: - message = socket.recv() - topic, messagedata = string.split() - msg_parts = messagedata.split(" ") - logging.info( - "topic: {} message: {} - {}".format( - topic, messagedata, time.strftime("%Y-%m-%d %H:%M") - ) - ) - process_topic(topic) diff --git a/tagbase_server/Dockerfile b/tagbase_server/Dockerfile index ba47906..36495ab 100644 --- a/tagbase_server/Dockerfile +++ b/tagbase_server/Dockerfile @@ -1,14 +1,14 @@ FROM python:slim-bullseye RUN mkdir -p /usr/src/app -RUN mkdir -p /usr/src/app/logs WORKDIR /usr/src/app COPY requirements.txt /usr/src/app/ RUN apt update && \ apt -y upgrade && \ - apt -y install bash gcc musl-dev tzdata && \ + apt -y install bash curl gcc musl-dev tzdata && \ + python3 -m pip install --upgrade pip && \ python3 -m pip install -r requirements.txt --no-cache-dir RUN ln -fs /usr/share/zoneinfo/Etc/UTC /etc/localtime @@ -16,7 +16,8 @@ RUN ln -fs /usr/share/zoneinfo/Etc/UTC /etc/localtime COPY . 
/usr/src/app EXPOSE 5433 +EXPOSE 5555 ENTRYPOINT ["gunicorn"] -CMD ["tagbase_server.__main__:app"] +CMD ["tagbase_server.__main__:app", "--preload"] diff --git a/tagbase_server/pyproject.toml b/tagbase_server/pyproject.toml index b3ace2b..8619a9f 100644 --- a/tagbase_server/pyproject.toml +++ b/tagbase_server/pyproject.toml @@ -6,6 +6,7 @@ requires = [ "pandas>=1.4.2", "parmap>=1.5.3", "patool>=1.12", + "pika>=1.3.1", "psycopg2-binary==2.9.3", "python_dateutil>=2.6.0", "pytz>=2021.3", diff --git a/tagbase_server/requirements.txt b/tagbase_server/requirements.txt index 6cc60d1..6da3683 100644 --- a/tagbase_server/requirements.txt +++ b/tagbase_server/requirements.txt @@ -5,6 +5,7 @@ gunicorn==20.1.0 pandas>=1.4.2 parmap>=1.5.3 patool>=1.12 +pika>=1.3.1 psycopg2-binary==2.9.5 python_dateutil>=2.6.0 pytz>=2021.3 diff --git a/tagbase_server/setup.py b/tagbase_server/setup.py index abbf59e..2730363 100644 --- a/tagbase_server/setup.py +++ b/tagbase_server/setup.py @@ -20,6 +20,7 @@ "pandas>=1.4.2", "parmap>=1.5.3", "patool>=1.12", + "pika>=1.3.1", "psycopg2-binary==2.9.3", "python_dateutil>=2.6.0", "pytz>=2021.3", diff --git a/tagbase_server/tagbase_server/__main__.py b/tagbase_server/tagbase_server/__main__.py index 88d19e3..288722b 100644 --- a/tagbase_server/tagbase_server/__main__.py +++ b/tagbase_server/tagbase_server/__main__.py @@ -10,21 +10,25 @@ LOGGER_NAME = "tagbase_server" -if not os.path.exists("./logs"): - os.makedirs("./logs") +os.makedirs("./logs/{}".format(LOGGER_NAME), exist_ok=True) logger = logging.getLogger(LOGGER_NAME) if logger.hasHandlers(): logger.handlers = [] logger.setLevel(logging.INFO) -formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") +formatter = logging.Formatter( + "%(asctime)s - %(filename)s:%(lineno)d - %(levelname)s - %(message)s" +) s_handler = logging.StreamHandler() s_handler.setFormatter(formatter) logger.addHandler(s_handler) rf_handler = RotatingFileHandler( - f"./logs/{LOGGER_NAME}_log.txt", mode="a", 
maxBytes=100000, backupCount=10 + f"./logs/{LOGGER_NAME}/{LOGGER_NAME}_log.txt", + mode="a", + maxBytes=100000, + backupCount=10, ) rf_handler.setFormatter(formatter) logger.addHandler(rf_handler) diff --git a/tagbase_server/tagbase_server/controllers/events_controller.py b/tagbase_server/tagbase_server/controllers/events_controller.py index d84a833..7ae3946 100644 --- a/tagbase_server/tagbase_server/controllers/events_controller.py +++ b/tagbase_server/tagbase_server/controllers/events_controller.py @@ -2,6 +2,7 @@ from tagbase_server.models.event_put200 import EventPut200 # noqa: E501 from tagbase_server.models.events200 import Events200 # noqa: E501 from tagbase_server.models.response500 import Response500 # noqa: E501 +from tagbase_server.utils.db_utils import connect from tagbase_server import util @@ -27,7 +28,7 @@ def get_event(event_id): # noqa: E501 return Event200.from_dict( { "event_category": result[0], - "event_id": result[1], + "event_id": str(result[1]), "event_name": result[2], "event_notes": result[3], "event_status": result[4], @@ -58,7 +59,7 @@ def list_all_events(): # noqa: E501 for event in cur.fetchall(): events.append( { - "event_id": event[0], + "event_id": str(event[0]), "tag_id": event[1], "submission_id": event[2], } @@ -67,7 +68,7 @@ def list_all_events(): # noqa: E501 "SELECT COUNT(DISTINCT event_id) FROM events_log", ) count = cur.fetchone()[0] - return Events200.from_dict({"count": count, "events": tags}) + return Events200.from_dict({"count": count, "events": events}) def list_events(tag_id, sub_id): # noqa: E501 @@ -94,7 +95,7 @@ def list_events(tag_id, sub_id): # noqa: E501 for event in cur.fetchall(): events.append( { - "event_id": event[0], + "event_id": str(event[0]), "tag_id": event[1], "submission_id": event[2], } @@ -103,7 +104,7 @@ def list_events(tag_id, sub_id): # noqa: E501 "SELECT COUNT(DISTINCT event_id) FROM events_log", ) count = cur.fetchone()[0] - return Events200.from_dict({"count": count, "events": tags}) + return 
Events200.from_dict({"count": count, "events": events}) def put_event(event_id, notes=None): # noqa: E501 @@ -126,5 +127,5 @@ def put_event(event_id, notes=None): # noqa: E501 "UPDATE events_log SET notes = %s WHERE event_id = %s", (notes, event_id), ) - message = f"Event: '{int(event_id)}' successfully updated." + message = f"Event: '{str(event_id)}' successfully updated." return EventPut200.from_dict({"code": "200", "message": message}) diff --git a/tagbase_server/tagbase_server/utils/db_utils.py b/tagbase_server/tagbase_server/utils/db_utils.py index 8dfe207..8e2e942 100644 --- a/tagbase_server/tagbase_server/utils/db_utils.py +++ b/tagbase_server/tagbase_server/utils/db_utils.py @@ -13,7 +13,7 @@ def connect(): if they occur. :rtype: connection """ - logger.info("Attempting connection to TagbaseDB...") + logger.debug("Attempting connection to TagbaseDB...") try: conn = psycopg2.connect( dbname="tagbase", @@ -35,5 +35,5 @@ def connect(): "trace": poe, } ) - logger.info("Successfully connected to TagbaseDB.") + logger.debug("Successfully connected to TagbaseDB.") return conn diff --git a/tagbase_server/tagbase_server/utils/processing_utils.py b/tagbase_server/tagbase_server/utils/processing_utils.py index b9329d8..249a020 100644 --- a/tagbase_server/tagbase_server/utils/processing_utils.py +++ b/tagbase_server/tagbase_server/utils/processing_utils.py @@ -3,16 +3,17 @@ from datetime import datetime as dt from io import StringIO import time -import uuid import pandas as pd import psycopg2.extras import pytz + from slack_sdk import WebClient from slack_sdk.errors import SlackApiError from tzlocal import get_localzone -from tagbase_server.utils.db_utils import connect, create_event, update_event +from tagbase_server.utils.db_utils import connect +from tagbase_server.utils.rabbitmq_utils import publish_message logger = logging.getLogger(__name__) slack_token = os.environ.get("SLACK_BOT_TOKEN", "") @@ -25,12 +26,10 @@ def process_global_attributes( ): event_id = 
uuid.uuid4() global_start = time.perf_counter() - create_event( - event_category="metadata", - event_id=event_id, - event_name="populating metadata for new tag submission", - event_status="running", - time_start=start, + publish_message( + "event_log/create metadata {} populating-metadata-for-new-tag-submission running {}".format( + event_id, start + ) ) logger.debug("Processing global attribute: %s", line) tokens = line.strip()[1:].split(" = ") @@ -60,26 +59,23 @@ def process_global_attributes( global_finish = time.perf_counter() global_elapsed = round(finish - start, 2) submission_id = cur.fetchone()[0] - update_event( - duration=global_elapsed, - event_id=event_id, - event_status="finished", - submission_id=submission_id, - tag_id=submission_id, - time_end=global_finish, + publish_message( + "event_log/update {} {} finished {} {} {}".format( + global_elapsed, event_id, submission_id, submission_id, global_finish + ) ) def process_etuff_file(file, version=None, notes=None): logger.info("Started processing: %s", file) + import uuid + event_id = uuid.uuid4() start = time.perf_counter() - create_event( - event_category="submission", - event_id=event_id, - event_name="new tag submission", - event_status="running", - time_start=start, + publish_message( + "event_log/create submission {} new-tag-submission running {}".format( + event_id, dt.now(tz=pytz.utc).astimezone(get_localzone()) + ) ) submission_filename = file # full path name is now preferred rather than - file[file.rindex("/") + 1 :] logger.info( @@ -107,28 +103,27 @@ def process_etuff_file(file, version=None, notes=None): submission_filename, ) sub_finish = time.perf_counter() - sub_elapsed = round(finish - start, 2) + sub_elapsed = round(sub_finish - start, 2) cur.execute("SELECT currval('submission_submission_id_seq')") submission_id = cur.fetchone()[0] - update_event( - duration=sub_elapsed, - event_id=event_id, - event_status="finished", - submission_id=submission_id, - tag_id=submission_id, - 
time_end=sub_finish, + publish_message( + "event_log/update {} {} finished {} {} {}".format( + sub_elapsed, + event_id, + submission_id, + submission_id, + dt.now(tz=pytz.utc).astimezone(get_localzone()), + ) ) metadata = [] proc_obs = [] s_time = time.perf_counter() - create_event( - event_category="submission", - event_id=event_id, - event_name="new tag submission", - event_status="running", - time_start=start, - ) + # publish_message( + # "event_log/create submission {} new-tag-submission running {}".format( + # event_id, start + # ) + # ) with open(file, "rb") as data: lines = [line.decode("utf-8", "ignore") for line in data.readlines()] variable_lookup = {} diff --git a/tagbase_server/tagbase_server/utils/rabbitmq_utils.py b/tagbase_server/tagbase_server/utils/rabbitmq_utils.py new file mode 100644 index 0000000..71fbdf4 --- /dev/null +++ b/tagbase_server/tagbase_server/utils/rabbitmq_utils.py @@ -0,0 +1,14 @@ +import logging +import pika +import time + +logger = logging.getLogger(__name__) + + +def publish_message(message): + connection = pika.BlockingConnection(pika.ConnectionParameters(host="rabbitmq")) + channel = connection.channel() + channel.queue_declare(queue="event_log") + channel.basic_publish(exchange="", routing_key="event_log", body=message) + logger.info(" [x] Sent: {}".format(message)) + connection.close() From 1d7b610b286955ab70cad56057baf6a9692a9e03 Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Sun, 12 Feb 2023 13:37:49 -0800 Subject: [PATCH 10/12] ISSUE-174 Create events_log table --- .../tagbase_server/openapi/openapi.yaml | 497 +++++------------- 1 file changed, 132 insertions(+), 365 deletions(-) diff --git a/tagbase_server/tagbase_server/openapi/openapi.yaml b/tagbase_server/tagbase_server/openapi/openapi.yaml index f9f6ddd..427e412 100644 --- a/tagbase_server/tagbase_server/openapi/openapi.yaml +++ b/tagbase_server/tagbase_server/openapi/openapi.yaml @@ -53,14 +53,7 @@ paths: description: Get information about an individual 
event operationId: get_event parameters: - - description: Numeric event ID - explode: true - in: path - name: event_id - required: true - schema: - type: number - style: simple + - $ref: '#/components/parameters/eventId' responses: "200": content: @@ -82,28 +75,8 @@ paths: description: Update notes for an event operationId: put_event parameters: - - description: Numeric event ID - explode: true - in: path - name: event_id - required: true - schema: - type: number - style: simple - - description: "Free-form text field where details of submitted eTUFF file for\ - \ ingest can be provided e.g. submitter name, etuff data contents (tag metadata\ - \ and measurements + primary position data, or just secondary solution-positional\ - \ meta/data)" - explode: true - in: query - name: notes - required: false - schema: - maxLength: 10000 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form + - $ref: '#/components/parameters/eventId' + - $ref: '#/components/parameters/notes' responses: "200": content: @@ -117,7 +90,7 @@ paths: schema: $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. - summary: Update the 'notes' associated with a event + summary: Update the 'notes' associated with an event tags: - events x-openapi-router-controller: tagbase_server.controllers.events_controller @@ -126,8 +99,8 @@ paths: description: Get network accessible file and execute ingestion operationId: ingest_get parameters: - - description: "Location of a network accessible (file, ftp, http, https) file\ - \ e.g. 'file:///usr/src/app/data/eTUFF-sailfish-117259.txt'." + - description: Location of a network accessible (file, ftp, http, https) + file e.g. 'file:///usr/src/app/data/eTUFF-sailfish-117259.txt'. 
explode: true in: query name: file @@ -138,43 +111,9 @@ paths: pattern: ^(?!\s*$).+ type: string style: form - - description: "Free-form text field where details of submitted eTUFF file for\ - \ ingest can be provided e.g. submitter name, etuff data contents (tag metadata\ - \ and measurements + primary position data, or just secondary solution-positional\ - \ meta/data)" - explode: true - in: query - name: notes - required: false - schema: - maxLength: 10000 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form - - description: "Type of file to be ingested, defaults to 'etuff'" - explode: true - in: query - name: type - required: false - schema: - default: etuff - enum: - - etuff - - netcdf - type: string - style: form - - description: Version identifier for the eTUFF tag data file ingested - explode: true - in: query - name: version - required: false - schema: - maxLength: 10 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form + - $ref: '#/components/parameters/notes' + - $ref: '#/components/parameters/type' + - $ref: '#/components/parameters/version' responses: "200": content: @@ -196,55 +135,10 @@ paths: description: Post a local file and perform a ingest operation operationId: ingest_post parameters: - - description: Free-form text field to explicitly define the name of the file - to be persisted - explode: true - in: query - name: filename - required: true - schema: - maxLength: 100 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form - - description: "Free-form text field where details of submitted eTUFF file for\ - \ ingest can be provided e.g. 
submitter name, etuff data contents (tag metadata\ - \ and measurements + primary position data, or just secondary solution-positional\ - \ meta/data)" - explode: true - in: query - name: notes - required: false - schema: - maxLength: 10000 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form - - description: "Type of file to be ingested, defaults to 'etuff'" - explode: true - in: query - name: type - required: false - schema: - default: etuff - enum: - - etuff - - netcdf - type: string - style: form - - description: Version identifier for the eTUFF tag data file ingested - explode: true - in: query - name: version - required: false - schema: - maxLength: 10 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form + - $ref: '#/components/parameters/filename' + - $ref: '#/components/parameters/notes' + - $ref: '#/components/parameters/type' + - $ref: '#/components/parameters/version' requestBody: content: application/octet-stream: @@ -307,14 +201,7 @@ paths: description: Get information about an individual tag operationId: get_tag parameters: - - description: Numeric tag ID - explode: true - in: path - name: tag_id - required: true - schema: - type: number - style: simple + - $ref: '#/components/parameters/tagId' responses: "200": content: @@ -337,47 +224,10 @@ paths: description: Update a tag submission operationId: put_tag parameters: - - description: "Free-form text field where details of submitted eTUFF file for\ - \ ingest can be provided e.g. 
submitter name, etuff data contents (tag metadata\ - \ and measurements + primary position data, or just secondary solution-positional\ - \ meta/data)" - explode: true - in: query - name: notes - required: false - schema: - maxLength: 10000 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form - - description: Numeric tag ID - explode: true - in: path - name: tag_id - required: true - schema: - type: number - style: simple - - description: Numeric submission ID - explode: true - in: path - name: sub_id - required: true - schema: - type: number - style: simple - - description: Version identifier for the eTUFF tag data file ingested - explode: true - in: query - name: version - required: false - schema: - maxLength: 10 - minLength: 1 - pattern: ^(?!\s*$).+ - type: string - style: form + - $ref: '#/components/parameters/notes' + - $ref: '#/components/parameters/tagId' + - $ref: '#/components/parameters/subId' + - $ref: '#/components/parameters/version' responses: "200": content: @@ -400,22 +250,8 @@ paths: description: Get all events for a given tag submission operationId: list_events parameters: - - description: Numeric tag ID - explode: true - in: path - name: tag_id - required: true - schema: - type: number - style: simple - - description: Numeric submission ID - explode: true - in: path - name: sub_id - required: true - schema: - type: number - style: simple + - $ref: '#/components/parameters/tagId' + - $ref: '#/components/parameters/subId' responses: "200": content: @@ -443,10 +279,8 @@ components: required: true schema: type: number - style: simple filename: - description: Free-form text field to explicitly define the name of the file - to be persisted + description: Free-form text field to explicitly define the name of the file to be persisted explode: true in: query name: filename @@ -458,10 +292,7 @@ components: type: string style: form notes: - description: "Free-form text field where details of submitted eTUFF file for\ - \ ingest can be 
provided e.g. submitter name, etuff data contents (tag metadata\ - \ and measurements + primary position data, or just secondary solution-positional\ - \ meta/data)" + description: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data) explode: true in: query name: notes @@ -480,7 +311,6 @@ components: required: true schema: type: number - style: simple tagId: description: Numeric tag ID explode: true @@ -489,9 +319,8 @@ components: required: true schema: type: number - style: simple type: - description: "Type of file to be ingested, defaults to 'etuff'" + description: Type of file to be ingested, defaults to 'etuff' explode: true in: query name: type @@ -499,8 +328,8 @@ components: schema: default: etuff enum: - - etuff - - netcdf + - etuff + - netcdf type: string style: form version: @@ -518,80 +347,66 @@ components: schemas: filename: description: Full name and extension of the ingested eTUFF tag data file - example: eTUFF-sailfish-117259_2.txt + example: "eTUFF-sailfish-117259_2.txt" title: filename type: string event200: description: Information for a given event example: - event_id: 1 + event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" tag_id: 1 submission_id: 1 - event_category: submission - event_name: new tag submission - time_start: 2022-04-01T04:58:21.319061+00:00 - time_end: 2022-04-01T04:59:21.319061+00:00 - duration: 0:01:00 - event_status: finished - event_notes: Some user defined notes... + event_category: "submission" + event_name: "new tag submission" + time_start: "2022-04-01T04:58:21.319061+00:00" + time_end: "2022-04-01T04:59:21.319061+00:00" + duration: "0:01:00" + event_status: "finished" + event_notes: "Some user defined notes..." properties: event_category: - description: '...' - example: '...' + description: ... + example: "..." 
type: string event_id: - description: Unique numeric event ID associated with the ingested tag data - file - example: 1 - title: event_id - type: integer + $ref: '#/components/schemas/event_id' event_name: - description: '...' - example: '...' + description: ... + example: "..." type: string event_notes: - description: Free-form text field where details of the event can be optionally - entered by the client - example: "The event represents a data anomaly with file XYZ because of ...,\ - \ this has been fixed in version ABC." + description: Free-form text field where details of the event can be optionally entered by the client + example: "The event represents a data anomaly with file XYZ because of ..., this has been fixed in version ABC." type: string event_status: - description: Free-form text field where details of the event can be optionally - entered by the client + description: Free-form text field where details of the event can be optionally entered by the client enum: - - failed - - finished - - killed - - migration - - postmigration - - premigration + - failed + - finished + - killed + - running example: failed type: string time_start: description: Local datetime stamp at the time of the event start - example: 2022-04-01T04:58:21.319061+00:00 + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' type: string time_end: description: Local datetime stamp at the time of the event end - example: 2022-04-01T04:58:21.319061+00:00 + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' type: string duration: - description: The event duration e.g. different between 'time_start' and - 'time_end' - example: 0:01:00 + description: The event duration e.g. 
different between 'time_start' and 'time_end' + example: "0:01:00" type: string submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data - file for ingest/importation into Tagbase - example: 5 - title: submission_id - type: integer + $ref: '#/components/schemas/submission_id' tag_id: - description: Unique numeric tag ID associated with the ingested tag data - file - example: 1 - title: tag_id - type: integer + $ref: '#/components/schemas/tag_id' title: event200 type: object eventPut200: @@ -611,22 +426,23 @@ components: title: eventPut200 type: object event_id: - description: Unique numeric event ID associated with the ingested tag data file - example: 1 + description: UUID associated with a particular event + example: "06335e84-2872-4914-8c5d-3ed07d2a2f16" + format: uuid title: event_id - type: integer + type: string events200: description: Response detailing all available events example: count: 3 events: - - event_id: 1 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2f16" tag_id: 1 submission_id: 1 - - event_id: 2 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" tag_id: 1 submission_id: 1 - - event_id: 3 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2fkf" tag_id: 1 submission_id: 1 properties: @@ -635,18 +451,29 @@ components: example: 3 type: integer events: - description: List of unique numeric event IDs and associated tag and submission - IDs - items: - $ref: '#/components/schemas/events200_events_inner' + description: List of event UUID's and associated tag and submission IDs type: array + items: + description: UUID and associated tag and submission ID + example: + event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" + tag_id: 1 + submission_id: 1 + properties: + event_id: + $ref: '#/components/schemas/event_id' + tag_id: + $ref: '#/components/schemas/tag_id' + submission_id: + $ref: '#/components/schemas/submission_id' + type: object title: tags200 type: object ingest200: description: HTTP 200 success response example: 
code: "200" - elapsed: 0:00:06.506691 + elapsed: '0:00:06.506691' message: Data file eTUFF-sailfish-117259.txt successfully ingested into Tagbase DB properties: @@ -692,8 +519,7 @@ components: title: response500 type: object submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data - file for ingest/importation into Tagbase + description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase example: 5 title: submission_id type: integer @@ -701,36 +527,67 @@ components: description: Information for a given tag example: tag: - - date_time: 2022-04-01T04:58:21.319061+00:00 - filename: eTUFF-sailfish-117259_2.txt + - date_time: '2022-04-01T04:58:21.319061+00:00' + filename: 'eTUFF-sailfish-117259_2.txt' metadata: person_owner: John Do owner_contect: john@do.net manufacturer: telemetry inc. model: new_gen attachment_method: anchor - notes: Ingested by admin on 2022-06-01 for Sailfish tagging campaign. + notes: 'Ingested by admin on 2022-06-01 for Sailfish tagging campaign.' submission_id: 5 tag_id: 3 - version: "1" - - date_time: 2022-06-01T05:39:46.896088+00:00 - filename: eTUFF-sailfish-117259_2.txt + version: '1' + - date_time: '2022-06-01T05:39:46.896088+00:00' + filename: 'eTUFF-sailfish-117259_2.txt' metadata: person_owner: Jane Do owner_contect: jane@do.net manufacturer: telemetry inc. model: newer_gen attachment_method: anchor - notes: Ingested by admin on 2022-06-01 for version 2 of the Sailfish tagging - campaign. + notes: 'Ingested by admin on 2022-06-01 for version 2 of the Sailfish tagging campaign.' 
submission_id: 6 tag_id: 3 - version: "2" + version: '2' properties: tag: description: List containing submissions for a given tag items: - $ref: '#/components/schemas/tag200_tag_inner' + type: object + properties: + date_time: + description: Local datetime stamp at the time of eTUFF tag data file ingestion + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + type: string + filename: + $ref: '#/components/schemas/filename' + metadata: + description: Contains the ingested tag metadata consistent with the eTUFF specification + example: + person_owner: John Do + owner_contect: john@do.net + manufacturer: telemetry inc. + model: new_gen + attachment_method: anchor + type: object + additionalProperties: + type: string + notes: + description: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solutionpositional meta/data) + example: "Ingested by admin on XXXX-XX-XX to back-process campaign XYZ." 
+ type: string + submission_id: + $ref: '#/components/schemas/submission_id' + tag_id: + $ref: '#/components/schemas/tag_id' + version: + description: Version identifier for the eTUFF tag data file ingested + example: "1" + type: string type: array title: tag200 type: object @@ -771,107 +628,17 @@ components: type: integer tags: description: List of unique numeric Tag IDs and associated filename - items: - $ref: '#/components/schemas/tags200_tags_inner' type: array + items: + description: Unique numeric Tag ID associated with the ingested tag eTUFF data file + example: + tag_id: 1 + filename: eTUFF-sailfish-117259_2.txt + properties: + tag_id: + $ref: '#/components/schemas/tag_id' + filename: + $ref: '#/components/schemas/filename' + type: object title: tags200 type: object - events200_events_inner: - description: Unique numeric event ID and associated tag and submission IDs - example: - event_id: 1 - tag_id: 1 - submission_id: 1 - properties: - event_id: - description: Unique numeric event ID associated with the ingested tag data - file - example: 1 - title: event_id - type: integer - tag_id: - description: Unique numeric tag ID associated with the ingested tag data - file - example: 1 - title: tag_id - type: integer - submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data - file for ingest/importation into Tagbase - example: 5 - title: submission_id - type: integer - title: events200_events_inner - type: object - tag200_tag_inner: - properties: - date_time: - description: Local datetime stamp at the time of eTUFF tag data file ingestion - example: 2022-04-01T04:58:21.319061+00:00 - title: date_time - type: string - filename: - description: Full name and extension of the ingested eTUFF tag data file - example: eTUFF-sailfish-117259_2.txt - title: filename - type: string - metadata: - additionalProperties: - type: string - description: Contains the ingested tag metadata consistent with the eTUFF - specification - example: - 
person_owner: John Do - owner_contect: john@do.net - manufacturer: telemetry inc. - model: new_gen - attachment_method: anchor - title: metadata - type: object - notes: - description: "Free-form text field where details of submitted eTUFF file\ - \ for ingest can be provided e.g. submitter name, etuff data contents\ - \ (tag metadata and measurements + primary position data, or just secondary\ - \ solutionpositional meta/data)" - example: Ingested by admin on XXXX-XX-XX to back-process campaign XYZ. - title: notes - type: string - submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data - file for ingest/importation into Tagbase - example: 5 - title: submission_id - type: integer - tag_id: - description: Unique numeric tag ID associated with the ingested tag data - file - example: 1 - title: tag_id - type: integer - version: - description: Version identifier for the eTUFF tag data file ingested - example: "1" - title: version - type: string - title: tag200_tag_inner - type: object - tags200_tags_inner: - description: Unique numeric Tag ID associated with the ingested tag eTUFF data - file - example: - tag_id: 1 - filename: eTUFF-sailfish-117259_2.txt - properties: - tag_id: - description: Unique numeric tag ID associated with the ingested tag data - file - example: 1 - title: tag_id - type: integer - filename: - description: Full name and extension of the ingested eTUFF tag data file - example: eTUFF-sailfish-117259_2.txt - title: filename - type: string - title: tags200_tags_inner - type: object From 977ce316eda43c70447862773bbb587cb0a99f8c Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Sun, 12 Feb 2023 14:25:25 -0800 Subject: [PATCH 11/12] ISSUE-174 Create events_log table --- .gitignore | 1 + openapi.yaml | 10 +- rabbitmq/data/.bash_history | 10 - rabbitmq/data/.erlang.cookie | 1 - .../data/mnesia/rabbit@rabbitmq-feature_flags | 6 - .../rabbit@rabbitmq/cluster_nodes.config | 1 - 
.../coordination/rabbit@rabbitmq/00000024.wal | 1 - .../coordination/rabbit@rabbitmq/meta.dets | Bin 5464 -> 0 bytes .../coordination/rabbit@rabbitmq/names.dets | Bin 5464 -> 0 bytes .../vhosts/628WB79CIFDYO9LJI6DKMI09L/.config | 2 - .../vhosts/628WB79CIFDYO9LJI6DKMI09L/.vhost | 1 - .../msg_store_persistent/0.rdq | 0 .../msg_store_persistent/clean.dot | 2 - .../msg_store_persistent/file_summary.ets | Bin 607 -> 0 bytes .../msg_store_persistent/msg_store_index.ets | Bin 533 -> 0 bytes .../msg_store_transient/0.rdq | 0 .../msg_store_transient/clean.dot | 2 - .../msg_store_transient/file_summary.ets | Bin 607 -> 0 bytes .../msg_store_transient/msg_store_index.ets | Bin 533 -> 0 bytes .../628WB79CIFDYO9LJI6DKMI09L/recovery.dets | Bin 6000 -> 0 bytes .../rabbit@rabbitmq/nodes_running_at_shutdown | 1 - .../quorum/rabbit@rabbitmq/00000024.wal | 1 - .../quorum/rabbit@rabbitmq/meta.dets | Bin 5464 -> 0 bytes .../quorum/rabbit@rabbitmq/names.dets | Bin 5464 -> 0 bytes .../rabbit_durable_exchange.DCD | Bin 1327 -> 0 bytes .../rabbit@rabbitmq/rabbit_durable_queue.DCD | 1 - .../rabbit@rabbitmq/rabbit_durable_route.DCD | 1 - .../rabbit_runtime_parameters.DCD | Bin 193 -> 0 bytes .../data/mnesia/rabbit@rabbitmq/rabbit_serial | 1 - .../rabbit_topic_permission.DCD | 1 - .../mnesia/rabbit@rabbitmq/rabbit_user.DCD | Bin 230 -> 0 bytes .../rabbit_user_permission.DCD | Bin 188 -> 0 bytes .../mnesia/rabbit@rabbitmq/rabbit_vhost.DCD | Bin 170 -> 0 bytes .../data/mnesia/rabbit@rabbitmq/schema.DAT | Bin 27294 -> 0 bytes .../mnesia/rabbit@rabbitmq/schema_version | 1 - tagbase_server/.openapi-generator/VERSION | 2 +- .../controllers/events_controller.py | 11 +- .../tagbase_server/models/event200.py | 21 +- .../tagbase_server/models/events200.py | 4 +- .../models/events200_events_inner.py | 12 +- .../tagbase_server/openapi/openapi.yaml | 491 +++++++++++++----- 41 files changed, 390 insertions(+), 195 deletions(-) delete mode 100644 rabbitmq/data/.bash_history delete mode 100644 
rabbitmq/data/.erlang.cookie delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/meta.dets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/names.dets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/.config delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/.vhost delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/0.rdq delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/clean.dot delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/file_summary.ets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_persistent/msg_store_index.ets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/0.rdq delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/clean.dot delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/file_summary.ets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/msg_store_index.ets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/recovery.dets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown delete mode 100644 
rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/meta.dets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/names.dets delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_exchange.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_runtime_parameters.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user_permission.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_vhost.DCD delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/schema.DAT delete mode 100644 rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version diff --git a/.gitignore b/.gitignore index 07524a4..aba50bd 100644 --- a/.gitignore +++ b/.gitignore @@ -70,3 +70,4 @@ postgis-data postgres-data logs tagbase_server/tagbase_server/coverage.xml +rabbitmq diff --git a/openapi.yaml b/openapi.yaml index 427e412..7e14e92 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -53,7 +53,7 @@ paths: description: Get information about an individual event operationId: get_event parameters: - - $ref: '#/components/parameters/eventId' + - $ref: '#/components/parameters/event_id' responses: "200": content: @@ -75,7 +75,7 @@ paths: description: Update notes for an event operationId: put_event parameters: - - $ref: '#/components/parameters/eventId' + - $ref: '#/components/parameters/event_id' - $ref: '#/components/parameters/notes' responses: "200": @@ -271,14 +271,14 @@ paths: x-openapi-router-controller: tagbase_server.controllers.events_controller 
components: parameters: - eventId: - description: Numeric event ID + event_id: + description: Event UUID explode: true in: path name: event_id required: true schema: - type: number + $ref: '#/components/schemas/event_id' filename: description: Free-form text field to explicitly define the name of the file to be persisted explode: true diff --git a/rabbitmq/data/.bash_history b/rabbitmq/data/.bash_history deleted file mode 100644 index 4950a33..0000000 --- a/rabbitmq/data/.bash_history +++ /dev/null @@ -1,10 +0,0 @@ -set -eo pipefail -rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running } -rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }' -rabbitmqctl -rabbitmqctl eval { true, rabbit_app_booted_and_running } -rabbitmqctl eval '{ true, rabbit_app_booted_and_running }' -rabbitmqctl eval -rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }, { [], no_alarms } = { rabbit:alarms(), no_alarms }, [] /= rabbit_networking:active_listeners(), rabbitmq_node_is_healthy.' -rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }, { [], no_alarms } = { rabbit:alarms(), no_alarms }, [] /= rabbit_networking:active_listeners(), rabbitmq_node_is_healthy.' 
-exit diff --git a/rabbitmq/data/.erlang.cookie b/rabbitmq/data/.erlang.cookie deleted file mode 100644 index d3f0f93..0000000 --- a/rabbitmq/data/.erlang.cookie +++ /dev/null @@ -1 +0,0 @@ -YWTZUIPTXZJYSVHEEOJU \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags b/rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags deleted file mode 100644 index 38c8ca2..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq-feature_flags +++ /dev/null @@ -1,6 +0,0 @@ -[classic_mirrored_queue_version,classic_queue_type_delivery_support, - direct_exchange_routing_v2,drop_unroutable_metric,empty_basic_get_metric, - feature_flags_v2,implicit_default_bindings,listener_records_in_ets, - maintenance_mode_status,quorum_queue,stream_queue, - stream_single_active_consumer,tracking_records_in_ets,user_limits, - virtual_host_metadata]. diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config b/rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config deleted file mode 100644 index 178bfa9..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/cluster_nodes.config +++ /dev/null @@ -1 +0,0 @@ -{[rabbit@rabbitmq],[rabbit@rabbitmq]}. 
diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal b/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal deleted file mode 100644 index 698b19c..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/00000024.wal +++ /dev/null @@ -1 +0,0 @@ -RAWA \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/meta.dets b/rabbitmq/data/mnesia/rabbit@rabbitmq/coordination/rabbit@rabbitmq/meta.dets deleted file mode 100644 index c1ae47e3b05f8128913ddbd12be673bebf7cc413..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5464 zcmZQz5Vc@fefB*A0|O%zb0U~vF`zIAFfuT~7!aDSiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!KiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!K4|_EY)hk28!*W$TA%+y2U)o@4HWyHfLLJ zELndot|IqHfdm^+l==k}m#($}&?`2|Md=|@_7OxxtgI2i%BBw=^akvDHv{+|bUqdSlD0bY%5qcX=1Ny)=3mwf}%fisAJ5cO_0ASFb63JPi(N^THD3pF8)<-pU z&aad9RD%K+Q8eZSG*^D*B4E&LxDZW%BsoG5iE(O$2u^joehUKskd1DyR$OoA<=m69i6wFHB-Iq!0`$_Wdao%J1Z8y}j#TNEJ)9nzMmoJ1Fv8htqyBck=uClcmd- zjc1m=f6dP$_dtOJ8&FjG5fqoMu>sI4HY`NtAyRe`L`1A?5W&iJ9o$=@4)7m3md?}& z-h*0i8W}JSTk#s@hi+*BXMgQdsy9Yov>l1r0yNTMo$Y*ORv1w1u#d-$zu`VO1YYP0 zxNnRR*mjMcvWcYITBoSz_hce8>Ew72c}0v>{4BIJ6($Z9AdSdFi1g7dw6*ghjfZiK zE`SW$SwiY2ue6NysS%|^eoqr6sJAdR+OE}D$AgCemYi{I3s+I-M6qp`m!#RhNlCSn z{*5;YT8}80V2W)u)L&>DC%xK7q>m!vD33dZs2|N8(Usg4edO-a=w;ab10pHQAGFHC ABme*a diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/msg_store_index.ets b/rabbitmq/data/mnesia/rabbit@rabbitmq/msg_stores/vhosts/628WB79CIFDYO9LJI6DKMI09L/msg_store_transient/msg_store_index.ets deleted file mode 100644 index 3af7cad30af1560175657d81a9ac72084d616819..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 533 
zcmZ`$J5B>J5cO_0ASFb63JPi(N^St6g_@AY@@GQ4tUZhEkSr|{6_=slCY*o@Zo$}F z5P@)G`+df5JViOE)bwi1Sn)2eFVCMA2a8g((&!l*XtwKxlqR0_o4r?GHy^BAy=*_T z^6hhWk+g>z6u9U`V;(_sg-aK~2hE0a(F916Jr5!=PA$C$r@BpYf8|Yt|Jad2+j%Z>B~QX?0mjqj(B){A6BaZ(>O15%-0~371BV#6 zaT?ay6{t7C^3kCt!|$I-)Hv7TQOJfAKkZksYY7+xh)K3}2VILnl<6{RJw{N`pd+ST z%EqXKPpqiX^9TA#qD0YS#%*9Z&IHyGD5j>cb8M*7LbDx|HP!5AQ4@BOXPekm`j`wH Q=Y7y@N!5pWa)QMV=pu(iOt7?c-GD^l)!KySo`r zr{gDEsdPMcw3Db9ITJ-mfkR`L*G$#5vPmwLHT0auZgV9i4>RKLL=;vj-Laam;;pip zR?>81bFRh;3Y>LSVTF>)6mz4@EnnQ!Royy|51%)6rsYvK?jny_{a&Nb8#H?uJ)Q(Y zJiCA}&u$>Xvjd#3c&)Y>_aEKg{MP7`Gyp*)qZTVhV2c*OgkMxi6 diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown b/rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown deleted file mode 100644 index 2843977..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/nodes_running_at_shutdown +++ /dev/null @@ -1 +0,0 @@ -[rabbit@rabbitmq]. diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal b/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal deleted file mode 100644 index 698b19c..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/00000024.wal +++ /dev/null @@ -1 +0,0 @@ -RAWA \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/meta.dets b/rabbitmq/data/mnesia/rabbit@rabbitmq/quorum/rabbit@rabbitmq/meta.dets deleted file mode 100644 index c1ae47e3b05f8128913ddbd12be673bebf7cc413..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5464 zcmZQz5Vc@fefB*A0|O%zb0U~vF`zIAFfuT~7!aDSiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!KiJR2M?+wQhXC(r{vY8fVAT7gAut*OqaiRF0;3@?8UmvsFd71*Aut*O dqaiRF0wW>>1ehV?rV7xpNFE5^z!K+X1b}{rZ!E%yC=bS@F?QJhw*JZnY4Bh zM3Aubl$lA${PNF#MteahZmy;Pu;kav%Lf@FO!9KQlEjcS!afRI*}?{bMRKx%_&hmX zBxeYRnsJ_)i*6|@8E}C3i9_)C!6Ax1?VYdPoil_3x)+kEHObH>n(DIAf(mD!r%vUq9fmfBmy_Gk!%UNKqt^$+IQiK}V;C0m3l8~{mH+?% diff --git 
a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD deleted file mode 100644 index f8dd237..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_queue.DCD +++ /dev/null @@ -1 +0,0 @@ -cXM \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD deleted file mode 100644 index f8dd237..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_durable_route.DCD +++ /dev/null @@ -1 +0,0 @@ -cXM \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_runtime_parameters.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_runtime_parameters.DCD deleted file mode 100644 index a8c88b9a7679106f6961c370667f6dc3859d7b7c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 193 zcmXwxu?oU46h)t=R?tDve{c|5tafp;n@Am;)Imbh7n<0nYT8nqodo~RA2Lv#?%}{a zoi1^b<8BB5F+ar7Q@RL){F+iJ7z%^X#{{Vr8IZuAXW)kZBJh_8Bf~gX)w&&hmxKej zZw}=3;m~uYws~%w1YvCIqEcGW8#7FcN|+L1q6!NI%W0C=rA4Yx+uQ8l^nTt`=#?U( S=}D#Ge)VX!>=MP*MmAq}!aF|z diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial deleted file mode 100644 index df11948..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_serial +++ /dev/null @@ -1 +0,0 @@ -24. 
diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD deleted file mode 100644 index f8dd237..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_topic_permission.DCD +++ /dev/null @@ -1 +0,0 @@ -cXM \ No newline at end of file diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_user.DCD deleted file mode 100644 index 86a9345e0561faea4fe43ef8068e8261b31b0dbe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 230 zcmZQ%VrEH>@a1M;VDL!__i=2_U`t`(%E?cU&qz&7Ni9lYU{6U-i3bW~GcX(K8DulC zndliA>KUgn@E0W}C1sX4K&afp4CW*T2DY9g2IkJoNeryh>wso9!Oi5&%qvMP%1g|N zFD*_j$_2`?rkAD`mw;F*lCmiQn^_8_w!dYXRqyfV<8I&D!~*LjM=v+Tzd9Q*=YvS1 zRzwa^A!7;yZ(>SrW?p7-Nl{`+eo@a1M;VDL!__i=2_U`t`(%E?cU&qz&7Ni9lYU{6U-i3bW~GcX(K8DulC zndliA>KUgn@E0W}C1sX4K&afp4CW*T2DY9g2IkJoNery>U4UjrK+I$YsxB=~Es8Hl vEy~R-F3!x)%K(aTfyK%)@{3Dyfr?qvOH&~%M*R#Hpfo}?NSaAc3zY%@)!Q-$ diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_vhost.DCD b/rabbitmq/data/mnesia/rabbit@rabbitmq/rabbit_vhost.DCD deleted file mode 100644 index 186fdeb93cc7cd1bfeaea929ec492c05b396a291..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 170 zcmZQ%VrEH>@a1M;VDL!__i=2_U`t`(%E?cU&qz&7Ni9lYU{6U-i3bW~GcX(K8DulC zndliA>KUgn@E0W}C1sX4K&afp4CW*T2DY9g2IkJoNerwPLV#wvLCj=HVPGxG$S*F* z1&T51XO#dcrW6M5l+@znqRfJl%=|o%jEGBWT4HHVi9%UsQAufHjsi$a3Ij_?VtR2F E0PZ?3d;kCd diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/schema.DAT b/rabbitmq/data/mnesia/rabbit@rabbitmq/schema.DAT deleted file mode 100644 index 9ce3422562b9da84ee10d63973fb7c02e7ed2f80..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 27294 zcmeHPON<;x8LsYm?Ci|0XERyv+5|sxa3aJ*Ai~3kDCVUohX9dC2q8;T(_NYAbw9@a zSli-Y0tY}s0tvXn0WRP(QZC3yLE^*(i3oySb=A15r+c#Nnckh9 
zt&v7E_2}yEzy9xk)nA2>{SvzTmv<0C8vian_=|L?TT8wgEAT$>ll!SdZ47q=r!xP!!mA6j)aH-*+Dx9-4jP;^7|$o`0pUTd3+`mkg^}O>}VpHIJS?C)rY>^q<`kER30K6=1*+CQon^7_W zJRIA#qtHSPmqggY5#Axgbi&97J|g28>C30zdzmxR{X3H}lD?MAzkW!b{}!Kr%Iewa zPq)qe>@V9OKLQV?n35ug3u-Xiv?-A$%L5#S=c#n*5?NCsJ>u^0#hGQctFKcAb)M+u7LcNyOSw}Nf=69yRRrvr2p4B{v)vH%`pO?sNdX6 zfR!v|@oe9=mlh6M@<@pybe(tVKzX;psI#|M%qi zKg#hR64y4y3W4IDiZf^=Z#N{un?L-UkOmCb`VTLQ*cpS*Y2+6GGvH z5(;I*Y2S@sh=11(0zaU>Ts$z$zU^4yfS4or&+m3%@V{34{-a``!a{$`$t83#a9~>? z9EQX#1^=CwSHH!bM&sQnIgR^{%hdxHQ9xhk`oAsff8o`q^c&YzSrJg&^T0@f&M1{~ z3;{3=mY5;18@|AkPkiQ^0`~t=urFf&17EKbj{glA|6*NFg#O3{vyZ*rhyzg6FNOVC z$Pk=q z$XBKr#?kAE{@4B|&%f~M1Bg@GF6>9Zu)hav)`0DdNpA)z?m3=A7@5+kALERK*~|#( zj8Z3G5G0Ym5>tZSIQ26j2(G@T4D=B-rz#34pkL?y|64Nt#rnVS|IfuF9&!G2JIy`n z8`wR^$dQBf)uGZaa`~y{;KZT5MGXJEM*`XNudpS7X6RsOhyUQ%x5JB`X{2j9bpIlx zWw69d)-XO!umHL|8B2;7)WQhUAjF|0o?!4ThZcJ5!ObA^o8>baVLy&6cwm~?nCx{E z0SCIriR}b!(jS8jT8aw1ac5Dez)MQT72tWN)%qds|JP;w3;*7+L)bn$x(6`Zo<@Ba z_ZT!D>edG+_t>0{j)xP_;l^w8CG#tGCHNOA1vMA$UqN_D(|2gJ zoE|MeEGBSverA9&0oZo;T{i*#^oJh^6X3*7On^#qhy+(!!viS#l`cZ{A9qp3yVOO^ zZ{LAa7WA471QFp%E%DL}u54JOpj!$Y^SwDDod`$ilcOaeAeJP;(RP3!)tH8zKyca% zR$u(i+al}jidz0Ycxp|(@ir+?eVzONYW;8gF+pnmZ}b2sD&$4{JAG1-{t;P+cKBv8 zc1%ibHZh#z zMIsj3*XWb1u=BOx=qxi10^-s1lPa8lMK+neET3xitg~kEm5EEr<=m#&RoxT?W`Y9M z<6Qq|_WvB`BAXfME~a^E9`-;KvH0%DBH;UUPmA)|juAX$KwvGOPWe)u#|rC|lrc@+ zV`mhWlp?V7%0C7|Jm1=RL{O~@qn(F`=4V~_NU}PB;Plnc!KtCo!S@AvdguzTUqZ-D zmk%Knr^_n{{UTj{1ZjuUM5ieT diff --git a/rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version b/rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version deleted file mode 100644 index 203a50c..0000000 --- a/rabbitmq/data/mnesia/rabbit@rabbitmq/schema_version +++ /dev/null @@ -1 +0,0 @@ 
-[store_msg,persistent_bytes,multiple_routing_keys,exchange_options,queue_options,topic_permission,vhost_limits,user_password_hashing,cluster_name,policy_apply_to,topic_trie_node,mirrored_supervisor,gm,user_admin_to_tags,exchange_event_serial,semi_durable_route,topic_trie,add_opts_to_listener,remove_user_scope,move_messages_to_vhost_store]. diff --git a/tagbase_server/.openapi-generator/VERSION b/tagbase_server/.openapi-generator/VERSION index 0df17dd..e7e42a4 100644 --- a/tagbase_server/.openapi-generator/VERSION +++ b/tagbase_server/.openapi-generator/VERSION @@ -1 +1 @@ -6.2.1 \ No newline at end of file +6.3.0 \ No newline at end of file diff --git a/tagbase_server/tagbase_server/controllers/events_controller.py b/tagbase_server/tagbase_server/controllers/events_controller.py index 7ae3946..9a0c748 100644 --- a/tagbase_server/tagbase_server/controllers/events_controller.py +++ b/tagbase_server/tagbase_server/controllers/events_controller.py @@ -11,8 +11,9 @@ def get_event(event_id): # noqa: E501 Get information about an individual event # noqa: E501 - :param event_id: Numeric event ID - :type event_id: + :param event_id: Event UUID + :type event_id: str + :type event_id: str :rtype: Union[Event200, Tuple[Event200, int], Tuple[Event200, int, Dict[str, str]] """ @@ -68,6 +69,7 @@ def list_all_events(): # noqa: E501 "SELECT COUNT(DISTINCT event_id) FROM events_log", ) count = cur.fetchone()[0] + print(events) return Events200.from_dict({"count": count, "events": events}) @@ -112,8 +114,9 @@ def put_event(event_id, notes=None): # noqa: E501 Update notes for an event # noqa: E501 - :param event_id: Numeric event ID - :type event_id: + :param event_id: Event UUID + :type event_id: str + :type event_id: str :param notes: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. 
submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data) :type notes: str diff --git a/tagbase_server/tagbase_server/models/event200.py b/tagbase_server/tagbase_server/models/event200.py index 1d53ea2..ddf591a 100644 --- a/tagbase_server/tagbase_server/models/event200.py +++ b/tagbase_server/tagbase_server/models/event200.py @@ -33,7 +33,7 @@ def __init__( :param event_category: The event_category of this Event200. # noqa: E501 :type event_category: str :param event_id: The event_id of this Event200. # noqa: E501 - :type event_id: int + :type event_id: str :param event_name: The event_name of this Event200. # noqa: E501 :type event_name: str :param event_notes: The event_notes of this Event200. # noqa: E501 @@ -53,7 +53,7 @@ def __init__( """ self.openapi_types = { "event_category": str, - "event_id": int, + "event_id": str, "event_name": str, "event_notes": str, "event_status": str, @@ -126,10 +126,10 @@ def event_category(self, event_category): def event_id(self): """Gets the event_id of this Event200. - Unique numeric event ID associated with the ingested tag data file # noqa: E501 + UUID associated with a particular event # noqa: E501 :return: The event_id of this Event200. - :rtype: int + :rtype: str """ return self._event_id @@ -137,10 +137,10 @@ def event_id(self): def event_id(self, event_id): """Sets the event_id of this Event200. - Unique numeric event ID associated with the ingested tag data file # noqa: E501 + UUID associated with a particular event # noqa: E501 :param event_id: The event_id of this Event200. - :type event_id: int + :type event_id: str """ self._event_id = event_id @@ -211,14 +211,7 @@ def event_status(self, event_status): :param event_status: The event_status of this Event200. 
:type event_status: str """ - allowed_values = [ - "failed", - "finished", - "killed", - "migration", - "postmigration", - "premigration", - ] # noqa: E501 + allowed_values = ["failed", "finished", "killed", "running"] # noqa: E501 if event_status not in allowed_values: raise ValueError( "Invalid value for `event_status` ({0}), must be one of {1}".format( diff --git a/tagbase_server/tagbase_server/models/events200.py b/tagbase_server/tagbase_server/models/events200.py index 36b67ad..7563619 100644 --- a/tagbase_server/tagbase_server/models/events200.py +++ b/tagbase_server/tagbase_server/models/events200.py @@ -73,7 +73,7 @@ def count(self, count): def events(self): """Gets the events of this Events200. - List of unique numeric event IDs and associated tag and submission IDs # noqa: E501 + List of event UUID's and associated tag and submission IDs # noqa: E501 :return: The events of this Events200. :rtype: List[Events200EventsInner] @@ -84,7 +84,7 @@ def events(self): def events(self, events): """Sets the events of this Events200. - List of unique numeric event IDs and associated tag and submission IDs # noqa: E501 + List of event UUID's and associated tag and submission IDs # noqa: E501 :param events: The events of this Events200. :type events: List[Events200EventsInner] diff --git a/tagbase_server/tagbase_server/models/events200_events_inner.py b/tagbase_server/tagbase_server/models/events200_events_inner.py index c8599b2..636d92d 100644 --- a/tagbase_server/tagbase_server/models/events200_events_inner.py +++ b/tagbase_server/tagbase_server/models/events200_events_inner.py @@ -19,13 +19,13 @@ def __init__(self, event_id=None, tag_id=None, submission_id=None): # noqa: E50 """Events200EventsInner - a model defined in OpenAPI :param event_id: The event_id of this Events200EventsInner. # noqa: E501 - :type event_id: int + :type event_id: str :param tag_id: The tag_id of this Events200EventsInner. 
# noqa: E501 :type tag_id: int :param submission_id: The submission_id of this Events200EventsInner. # noqa: E501 :type submission_id: int """ - self.openapi_types = {"event_id": int, "tag_id": int, "submission_id": int} + self.openapi_types = {"event_id": str, "tag_id": int, "submission_id": int} self.attribute_map = { "event_id": "event_id", @@ -52,10 +52,10 @@ def from_dict(cls, dikt) -> "Events200EventsInner": def event_id(self): """Gets the event_id of this Events200EventsInner. - Unique numeric event ID associated with the ingested tag data file # noqa: E501 + UUID associated with a particular event # noqa: E501 :return: The event_id of this Events200EventsInner. - :rtype: int + :rtype: str """ return self._event_id @@ -63,10 +63,10 @@ def event_id(self): def event_id(self, event_id): """Sets the event_id of this Events200EventsInner. - Unique numeric event ID associated with the ingested tag data file # noqa: E501 + UUID associated with a particular event # noqa: E501 :param event_id: The event_id of this Events200EventsInner. 
- :type event_id: int + :type event_id: str """ self._event_id = event_id diff --git a/tagbase_server/tagbase_server/openapi/openapi.yaml b/tagbase_server/tagbase_server/openapi/openapi.yaml index 427e412..3b1d8cc 100644 --- a/tagbase_server/tagbase_server/openapi/openapi.yaml +++ b/tagbase_server/tagbase_server/openapi/openapi.yaml @@ -53,7 +53,14 @@ paths: description: Get information about an individual event operationId: get_event parameters: - - $ref: '#/components/parameters/eventId' + - description: Event UUID + explode: true + in: path + name: event_id + required: true + schema: + $ref: '#/components/schemas/event_id' + style: simple responses: "200": content: @@ -75,8 +82,28 @@ paths: description: Update notes for an event operationId: put_event parameters: - - $ref: '#/components/parameters/eventId' - - $ref: '#/components/parameters/notes' + - description: Event UUID + explode: true + in: path + name: event_id + required: true + schema: + $ref: '#/components/schemas/event_id' + style: simple + - description: "Free-form text field where details of submitted eTUFF file for\ + \ ingest can be provided e.g. submitter name, etuff data contents (tag metadata\ + \ and measurements + primary position data, or just secondary solution-positional\ + \ meta/data)" + explode: true + in: query + name: notes + required: false + schema: + maxLength: 10000 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form responses: "200": content: @@ -99,8 +126,8 @@ paths: description: Get network accessible file and execute ingestion operationId: ingest_get parameters: - - description: Location of a network accessible (file, ftp, http, https) - file e.g. 'file:///usr/src/app/data/eTUFF-sailfish-117259.txt'. + - description: "Location of a network accessible (file, ftp, http, https) file\ + \ e.g. 'file:///usr/src/app/data/eTUFF-sailfish-117259.txt'." 
explode: true in: query name: file @@ -111,9 +138,43 @@ paths: pattern: ^(?!\s*$).+ type: string style: form - - $ref: '#/components/parameters/notes' - - $ref: '#/components/parameters/type' - - $ref: '#/components/parameters/version' + - description: "Free-form text field where details of submitted eTUFF file for\ + \ ingest can be provided e.g. submitter name, etuff data contents (tag metadata\ + \ and measurements + primary position data, or just secondary solution-positional\ + \ meta/data)" + explode: true + in: query + name: notes + required: false + schema: + maxLength: 10000 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form + - description: "Type of file to be ingested, defaults to 'etuff'" + explode: true + in: query + name: type + required: false + schema: + default: etuff + enum: + - etuff + - netcdf + type: string + style: form + - description: Version identifier for the eTUFF tag data file ingested + explode: true + in: query + name: version + required: false + schema: + maxLength: 10 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form responses: "200": content: @@ -135,10 +196,55 @@ paths: description: Post a local file and perform a ingest operation operationId: ingest_post parameters: - - $ref: '#/components/parameters/filename' - - $ref: '#/components/parameters/notes' - - $ref: '#/components/parameters/type' - - $ref: '#/components/parameters/version' + - description: Free-form text field to explicitly define the name of the file + to be persisted + explode: true + in: query + name: filename + required: true + schema: + maxLength: 100 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form + - description: "Free-form text field where details of submitted eTUFF file for\ + \ ingest can be provided e.g. 
submitter name, etuff data contents (tag metadata\ + \ and measurements + primary position data, or just secondary solution-positional\ + \ meta/data)" + explode: true + in: query + name: notes + required: false + schema: + maxLength: 10000 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form + - description: "Type of file to be ingested, defaults to 'etuff'" + explode: true + in: query + name: type + required: false + schema: + default: etuff + enum: + - etuff + - netcdf + type: string + style: form + - description: Version identifier for the eTUFF tag data file ingested + explode: true + in: query + name: version + required: false + schema: + maxLength: 10 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form requestBody: content: application/octet-stream: @@ -201,7 +307,14 @@ paths: description: Get information about an individual tag operationId: get_tag parameters: - - $ref: '#/components/parameters/tagId' + - description: Numeric tag ID + explode: true + in: path + name: tag_id + required: true + schema: + type: number + style: simple responses: "200": content: @@ -224,10 +337,47 @@ paths: description: Update a tag submission operationId: put_tag parameters: - - $ref: '#/components/parameters/notes' - - $ref: '#/components/parameters/tagId' - - $ref: '#/components/parameters/subId' - - $ref: '#/components/parameters/version' + - description: "Free-form text field where details of submitted eTUFF file for\ + \ ingest can be provided e.g. 
submitter name, etuff data contents (tag metadata\ + \ and measurements + primary position data, or just secondary solution-positional\ + \ meta/data)" + explode: true + in: query + name: notes + required: false + schema: + maxLength: 10000 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form + - description: Numeric tag ID + explode: true + in: path + name: tag_id + required: true + schema: + type: number + style: simple + - description: Numeric submission ID + explode: true + in: path + name: sub_id + required: true + schema: + type: number + style: simple + - description: Version identifier for the eTUFF tag data file ingested + explode: true + in: query + name: version + required: false + schema: + maxLength: 10 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form responses: "200": content: @@ -250,8 +400,22 @@ paths: description: Get all events for a given tag submission operationId: list_events parameters: - - $ref: '#/components/parameters/tagId' - - $ref: '#/components/parameters/subId' + - description: Numeric tag ID + explode: true + in: path + name: tag_id + required: true + schema: + type: number + style: simple + - description: Numeric submission ID + explode: true + in: path + name: sub_id + required: true + schema: + type: number + style: simple responses: "200": content: @@ -271,16 +435,18 @@ paths: x-openapi-router-controller: tagbase_server.controllers.events_controller components: parameters: - eventId: - description: Numeric event ID + event_id: + description: Event UUID explode: true in: path name: event_id required: true schema: - type: number + $ref: '#/components/schemas/event_id' + style: simple filename: - description: Free-form text field to explicitly define the name of the file to be persisted + description: Free-form text field to explicitly define the name of the file + to be persisted explode: true in: query name: filename @@ -292,7 +458,10 @@ components: type: string style: form notes: - description: 
Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data) + description: "Free-form text field where details of submitted eTUFF file for\ + \ ingest can be provided e.g. submitter name, etuff data contents (tag metadata\ + \ and measurements + primary position data, or just secondary solution-positional\ + \ meta/data)" explode: true in: query name: notes @@ -311,6 +480,7 @@ components: required: true schema: type: number + style: simple tagId: description: Numeric tag ID explode: true @@ -319,8 +489,9 @@ components: required: true schema: type: number + style: simple type: - description: Type of file to be ingested, defaults to 'etuff' + description: "Type of file to be ingested, defaults to 'etuff'" explode: true in: query name: type @@ -328,8 +499,8 @@ components: schema: default: etuff enum: - - etuff - - netcdf + - etuff + - netcdf type: string style: form version: @@ -347,66 +518,78 @@ components: schemas: filename: description: Full name and extension of the ingested eTUFF tag data file - example: "eTUFF-sailfish-117259_2.txt" + example: eTUFF-sailfish-117259_2.txt title: filename type: string event200: description: Information for a given event example: - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" + event_id: 06335e84-2872-4914-8c5d-3ed07d2a2dj4 tag_id: 1 submission_id: 1 - event_category: "submission" - event_name: "new tag submission" - time_start: "2022-04-01T04:58:21.319061+00:00" - time_end: "2022-04-01T04:59:21.319061+00:00" - duration: "0:01:00" - event_status: "finished" - event_notes: "Some user defined notes..." + event_category: submission + event_name: new tag submission + time_start: 2022-04-01T04:58:21.319061+00:00 + time_end: 2022-04-01T04:59:21.319061+00:00 + duration: 0:01:00 + event_status: finished + event_notes: Some user defined notes... 
properties: event_category: - description: ... - example: "..." + description: '...' + example: '...' type: string event_id: - $ref: '#/components/schemas/event_id' + description: UUID associated with a particular event + example: 06335e84-2872-4914-8c5d-3ed07d2a2f16 + format: uuid + title: event_id + type: string event_name: - description: ... - example: "..." + description: '...' + example: '...' type: string event_notes: - description: Free-form text field where details of the event can be optionally entered by the client - example: "The event represents a data anomaly with file XYZ because of ..., this has been fixed in version ABC." + description: Free-form text field where details of the event can be optionally + entered by the client + example: "The event represents a data anomaly with file XYZ because of ...,\ + \ this has been fixed in version ABC." type: string event_status: - description: Free-form text field where details of the event can be optionally entered by the client + description: Free-form text field where details of the event can be optionally + entered by the client enum: - - failed - - finished - - killed - - running + - failed + - finished + - killed + - running example: failed type: string time_start: description: Local datetime stamp at the time of the event start - example: '2022-04-01T04:58:21.319061+00:00' - #format: date - #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + example: 2022-04-01T04:58:21.319061+00:00 type: string time_end: description: Local datetime stamp at the time of the event end - example: '2022-04-01T04:58:21.319061+00:00' - #format: date - #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + example: 2022-04-01T04:58:21.319061+00:00 type: string duration: - description: The event duration e.g. different between 'time_start' and 'time_end' - example: "0:01:00" + description: The event duration e.g. 
different between 'time_start' and + 'time_end' + example: 0:01:00 type: string submission_id: - $ref: '#/components/schemas/submission_id' + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer tag_id: - $ref: '#/components/schemas/tag_id' + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer title: event200 type: object eventPut200: @@ -427,7 +610,7 @@ components: type: object event_id: description: UUID associated with a particular event - example: "06335e84-2872-4914-8c5d-3ed07d2a2f16" + example: 06335e84-2872-4914-8c5d-3ed07d2a2f16 format: uuid title: event_id type: string @@ -436,13 +619,13 @@ components: example: count: 3 events: - - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2f16" + - event_id: 06335e84-2872-4914-8c5d-3ed07d2a2f16 tag_id: 1 submission_id: 1 - - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" + - event_id: 06335e84-2872-4914-8c5d-3ed07d2a2dj4 tag_id: 1 submission_id: 1 - - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2fkf" + - event_id: 06335e84-2872-4914-8c5d-3ed07d2a2fkf tag_id: 1 submission_id: 1 properties: @@ -452,28 +635,16 @@ components: type: integer events: description: List of event UUID's and associated tag and submission IDs - type: array items: - description: UUID and associated tag and submission ID - example: - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" - tag_id: 1 - submission_id: 1 - properties: - event_id: - $ref: '#/components/schemas/event_id' - tag_id: - $ref: '#/components/schemas/tag_id' - submission_id: - $ref: '#/components/schemas/submission_id' - type: object + $ref: '#/components/schemas/events200_events_inner' + type: array title: tags200 type: object ingest200: description: HTTP 200 success response example: code: "200" - elapsed: '0:00:06.506691' + elapsed: 0:00:06.506691 message: Data file eTUFF-sailfish-117259.txt 
successfully ingested into Tagbase DB properties: @@ -519,7 +690,8 @@ components: title: response500 type: object submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase example: 5 title: submission_id type: integer @@ -527,67 +699,36 @@ components: description: Information for a given tag example: tag: - - date_time: '2022-04-01T04:58:21.319061+00:00' - filename: 'eTUFF-sailfish-117259_2.txt' + - date_time: 2022-04-01T04:58:21.319061+00:00 + filename: eTUFF-sailfish-117259_2.txt metadata: person_owner: John Do owner_contect: john@do.net manufacturer: telemetry inc. model: new_gen attachment_method: anchor - notes: 'Ingested by admin on 2022-06-01 for Sailfish tagging campaign.' + notes: Ingested by admin on 2022-06-01 for Sailfish tagging campaign. submission_id: 5 tag_id: 3 - version: '1' - - date_time: '2022-06-01T05:39:46.896088+00:00' - filename: 'eTUFF-sailfish-117259_2.txt' + version: "1" + - date_time: 2022-06-01T05:39:46.896088+00:00 + filename: eTUFF-sailfish-117259_2.txt metadata: person_owner: Jane Do owner_contect: jane@do.net manufacturer: telemetry inc. model: newer_gen attachment_method: anchor - notes: 'Ingested by admin on 2022-06-01 for version 2 of the Sailfish tagging campaign.' + notes: Ingested by admin on 2022-06-01 for version 2 of the Sailfish tagging + campaign. 
submission_id: 6 tag_id: 3 - version: '2' + version: "2" properties: tag: description: List containing submissions for a given tag items: - type: object - properties: - date_time: - description: Local datetime stamp at the time of eTUFF tag data file ingestion - example: '2022-04-01T04:58:21.319061+00:00' - #format: date - #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' - type: string - filename: - $ref: '#/components/schemas/filename' - metadata: - description: Contains the ingested tag metadata consistent with the eTUFF specification - example: - person_owner: John Do - owner_contect: john@do.net - manufacturer: telemetry inc. - model: new_gen - attachment_method: anchor - type: object - additionalProperties: - type: string - notes: - description: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solutionpositional meta/data) - example: "Ingested by admin on XXXX-XX-XX to back-process campaign XYZ." 
- type: string - submission_id: - $ref: '#/components/schemas/submission_id' - tag_id: - $ref: '#/components/schemas/tag_id' - version: - description: Version identifier for the eTUFF tag data file ingested - example: "1" - type: string + $ref: '#/components/schemas/tag200_tag_inner' type: array title: tag200 type: object @@ -628,17 +769,107 @@ components: type: integer tags: description: List of unique numeric Tag IDs and associated filename - type: array items: - description: Unique numeric Tag ID associated with the ingested tag eTUFF data file - example: - tag_id: 1 - filename: eTUFF-sailfish-117259_2.txt - properties: - tag_id: - $ref: '#/components/schemas/tag_id' - filename: - $ref: '#/components/schemas/filename' - type: object + $ref: '#/components/schemas/tags200_tags_inner' + type: array title: tags200 type: object + events200_events_inner: + description: UUID and associated tag and submission ID + example: + event_id: 06335e84-2872-4914-8c5d-3ed07d2a2dj4 + tag_id: 1 + submission_id: 1 + properties: + event_id: + description: UUID associated with a particular event + example: 06335e84-2872-4914-8c5d-3ed07d2a2f16 + format: uuid + title: event_id + type: string + tag_id: + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + title: events200_events_inner + type: object + tag200_tag_inner: + properties: + date_time: + description: Local datetime stamp at the time of eTUFF tag data file ingestion + example: 2022-04-01T04:58:21.319061+00:00 + title: date_time + type: string + filename: + description: Full name and extension of the ingested eTUFF tag data file + example: eTUFF-sailfish-117259_2.txt + title: filename + type: string + metadata: + additionalProperties: + type: string + description: 
Contains the ingested tag metadata consistent with the eTUFF + specification + example: + person_owner: John Do + owner_contect: john@do.net + manufacturer: telemetry inc. + model: new_gen + attachment_method: anchor + title: metadata + type: object + notes: + description: "Free-form text field where details of submitted eTUFF file\ + \ for ingest can be provided e.g. submitter name, etuff data contents\ + \ (tag metadata and measurements + primary position data, or just secondary\ + \ solutionpositional meta/data)" + example: Ingested by admin on XXXX-XX-XX to back-process campaign XYZ. + title: notes + type: string + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + tag_id: + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer + version: + description: Version identifier for the eTUFF tag data file ingested + example: "1" + title: version + type: string + title: tag200_tag_inner + type: object + tags200_tags_inner: + description: Unique numeric Tag ID associated with the ingested tag eTUFF data + file + example: + tag_id: 1 + filename: eTUFF-sailfish-117259_2.txt + properties: + tag_id: + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer + filename: + description: Full name and extension of the ingested eTUFF tag data file + example: eTUFF-sailfish-117259_2.txt + title: filename + type: string + title: tags200_tags_inner + type: object From ec290759db312bb03fd7b2e30c31bacdcafbbe43 Mon Sep 17 00:00:00 2001 From: Lewis John McGibbney Date: Sun, 12 Feb 2023 15:52:09 -0800 Subject: [PATCH 12/12] ISSUE-174 Create events_log table --- .gitignore | 1 + services/postgis/tagbase_schema.sql | 4 +-- services/subscriber/rabbitmq_subscriber.py | 7 +++-- .../controllers/events_controller.py | 
26 ++++++++--------- .../controllers/tags_controller.py | 4 --- .../tagbase_server/utils/processing_utils.py | 28 +++++++++++-------- .../tagbase_server/utils/rabbitmq_utils.py | 4 +-- 7 files changed, 38 insertions(+), 36 deletions(-) diff --git a/.gitignore b/.gitignore index aba50bd..44cc897 100644 --- a/.gitignore +++ b/.gitignore @@ -71,3 +71,4 @@ postgres-data logs tagbase_server/tagbase_server/coverage.xml rabbitmq +dbbackups diff --git a/services/postgis/tagbase_schema.sql b/services/postgis/tagbase_schema.sql index b258edd..868ee4c 100644 --- a/services/postgis/tagbase_schema.sql +++ b/services/postgis/tagbase_schema.sql @@ -522,8 +522,8 @@ ALTER TABLE ONLY proc_observations ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_variable_id_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); -ALTER TABLE ONLY events_log - ADD CONSTRAINT eventslog_submission_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); +-- ALTER TABLE ONLY events_log +-- ADD CONSTRAINT eventslog_submission_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); -- -- The following TRIGGER ensures that upon ingestion of an eTUFF file into tagbase-server, diff --git a/services/subscriber/rabbitmq_subscriber.py b/services/subscriber/rabbitmq_subscriber.py index 372139c..4d9d4c6 100644 --- a/services/subscriber/rabbitmq_subscriber.py +++ b/services/subscriber/rabbitmq_subscriber.py @@ -36,7 +36,7 @@ def process_topic(topic=None, msg_parts=None): import db_utils import uuid - if topic == "event_log/create": + if topic == "events_log/create": db_utils.create_event( event_category=msg_parts[0], event_id=uuid.UUID(msg_parts[1]), @@ -45,6 +45,7 @@ def process_topic(topic=None, msg_parts=None): time_start=msg_parts[4], ) else: + logger.info(msg_parts) db_utils.update_event( duration=msg_parts[0], event_id=uuid.UUID(msg_parts[1]), @@ -58,7 +59,7 @@ def process_topic(topic=None, msg_parts=None): def subscriber(): connection = 
pika.BlockingConnection(pika.ConnectionParameters(host="rabbitmq")) channel = connection.channel() - channel.queue_declare(queue="event_log") + channel.queue_declare(queue="events_log") def callback(ch, method, properties, body): logger.info("Received: %r" % body) @@ -66,7 +67,7 @@ def callback(ch, method, properties, body): process_topic(topic, messagedata.split(" ")) channel.basic_consume( - queue="event_log", on_message_callback=callback, auto_ack=True + queue="events_log", on_message_callback=callback, auto_ack=True ) logger.info("Waiting for messages...") channel.start_consuming() diff --git a/tagbase_server/tagbase_server/controllers/events_controller.py b/tagbase_server/tagbase_server/controllers/events_controller.py index 9a0c748..a893eb5 100644 --- a/tagbase_server/tagbase_server/controllers/events_controller.py +++ b/tagbase_server/tagbase_server/controllers/events_controller.py @@ -5,6 +5,10 @@ from tagbase_server.utils.db_utils import connect from tagbase_server import util +import logging + +logger = logging.getLogger(__name__) + def get_event(event_id): # noqa: E501 """Get information about an individual event @@ -25,19 +29,18 @@ def get_event(event_id): # noqa: E501 (event_id,), ) result = cur.fetchone() - logger.info(result) return Event200.from_dict( { - "event_category": result[0], - "event_id": str(result[1]), - "event_name": result[2], - "event_notes": result[3], - "event_status": result[4], + "submission_id": result[0], + "tag_id": result[1], + "event_id": str(result[2]), + "event_category": result[3], + "event_name": result[4], "time_start": result[5], "time_end": result[6], "duration": result[7], - "submission_id": result[8], - "tag_id": result[9], + "event_status": result[8], + "event_notes": result[9], } ) @@ -69,7 +72,6 @@ def list_all_events(): # noqa: E501 "SELECT COUNT(DISTINCT event_id) FROM events_log", ) count = cur.fetchone()[0] - print(events) return Events200.from_dict({"count": count, "events": events}) @@ -89,7 +91,7 @@ def 
list_events(tag_id, sub_id): # noqa: E501 with conn: with conn.cursor() as cur: cur.execute( - "SELECT DISTINCT event_id, tag_id, submission_id " + "SELECT DISTINCT event_id" "FROM events_log WHERE tag_id = %s AND submission_id = %s ORDER BY tag_id", (tag_id, sub_id), ) @@ -98,8 +100,6 @@ def list_events(tag_id, sub_id): # noqa: E501 events.append( { "event_id": str(event[0]), - "tag_id": event[1], - "submission_id": event[2], } ) cur.execute( @@ -127,7 +127,7 @@ def put_event(event_id, notes=None): # noqa: E501 with conn.cursor() as cur: if notes is not None: cur.execute( - "UPDATE events_log SET notes = %s WHERE event_id = %s", + "UPDATE events_log SET event_notes = %s WHERE event_id = %s", (notes, event_id), ) message = f"Event: '{str(event_id)}' successfully updated." diff --git a/tagbase_server/tagbase_server/controllers/tags_controller.py b/tagbase_server/tagbase_server/controllers/tags_controller.py index 80710fb..115b94e 100644 --- a/tagbase_server/tagbase_server/controllers/tags_controller.py +++ b/tagbase_server/tagbase_server/controllers/tags_controller.py @@ -5,10 +5,6 @@ from tagbase_server.utils.db_utils import connect from tagbase_server import util -import logging - -logger = logging.getLogger(__name__) - def get_tag(tag_id): # noqa: E501 """Get information about an individual tag diff --git a/tagbase_server/tagbase_server/utils/processing_utils.py b/tagbase_server/tagbase_server/utils/processing_utils.py index 249a020..895a79c 100644 --- a/tagbase_server/tagbase_server/utils/processing_utils.py +++ b/tagbase_server/tagbase_server/utils/processing_utils.py @@ -26,11 +26,11 @@ def process_global_attributes( ): event_id = uuid.uuid4() global_start = time.perf_counter() - publish_message( - "event_log/create metadata {} populating-metadata-for-new-tag-submission running {}".format( - event_id, start - ) - ) + # publish_message( + # "event_log/create metadata {} populating-metadata-for-new-tag-submission running {}".format( + # event_id, start + # ) + # 
) logger.debug("Processing global attribute: %s", line) tokens = line.strip()[1:].split(" = ") logger.debug("Processing token: %s", tokens) @@ -59,11 +59,11 @@ def process_global_attributes( global_finish = time.perf_counter() global_elapsed = round(finish - start, 2) submission_id = cur.fetchone()[0] - publish_message( - "event_log/update {} {} finished {} {} {}".format( - global_elapsed, event_id, submission_id, submission_id, global_finish - ) - ) + # publish_message( + # "event_log/update {} {} finished {} {} {}".format( + # global_elapsed, event_id, submission_id, submission_id, global_finish + # ) + # ) def process_etuff_file(file, version=None, notes=None): @@ -73,7 +73,7 @@ def process_etuff_file(file, version=None, notes=None): event_id = uuid.uuid4() start = time.perf_counter() publish_message( - "event_log/create submission {} new-tag-submission running {}".format( + "events_log/create submission {} new-tag-submission running {}".format( event_id, dt.now(tz=pytz.utc).astimezone(get_localzone()) ) ) @@ -106,8 +106,12 @@ def process_etuff_file(file, version=None, notes=None): sub_elapsed = round(sub_finish - start, 2) cur.execute("SELECT currval('submission_submission_id_seq')") submission_id = cur.fetchone()[0] + logger.info(submission_id) + # cur.execute("SELECT tag_id FROM submission WHERE filename = %s", (submission_filename)) + # tag_id = cur.fetchone()[0] + # logger.info(tag_id) publish_message( - "event_log/update {} {} finished {} {} {}".format( + "events_log/update {} {} finished {} {} {}".format( sub_elapsed, event_id, submission_id, diff --git a/tagbase_server/tagbase_server/utils/rabbitmq_utils.py b/tagbase_server/tagbase_server/utils/rabbitmq_utils.py index 71fbdf4..ce26f6a 100644 --- a/tagbase_server/tagbase_server/utils/rabbitmq_utils.py +++ b/tagbase_server/tagbase_server/utils/rabbitmq_utils.py @@ -8,7 +8,7 @@ def publish_message(message): connection = pika.BlockingConnection(pika.ConnectionParameters(host="rabbitmq")) channel = 
connection.channel() - channel.queue_declare(queue="event_log") - channel.basic_publish(exchange="", routing_key="event_log", body=message) + channel.queue_declare(queue="events_log") + channel.basic_publish(exchange="", routing_key="events_log", body=message) logger.info(" [x] Sent: {}".format(message)) connection.close()