diff --git a/.gitignore b/.gitignore index c8b4051..44cc897 100644 --- a/.gitignore +++ b/.gitignore @@ -66,6 +66,9 @@ target/ .env +postgis-data postgres-data logs tagbase_server/tagbase_server/coverage.xml +rabbitmq +dbbackups diff --git a/docker-compose.yml b/docker-compose.yml index 9a15b32..c5e40e7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -18,9 +18,11 @@ services: "docker_compose_diagram.icon": "docker" links: - postgis + networks: + - internal-network + restart: unless-stopped volumes: - ./dbbackups:/backups - restart: on-failure docker-cron: build: context: ./services/docker-cron @@ -80,36 +82,12 @@ services: networks: - internal-network restart: unless-stopped - # postgres: - # build: - # context: ./services/postgres - # environment: - # - PGDATA=/var/lib/postgresql/data/pgdata - # - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - # expose: - # - ${POSTGRES_PORT} - # healthcheck: - # test: ["CMD-SHELL", "pg_isready -d tagbase -h postgres -U tagbase"] - # interval: 15s - # timeout: 5s - # retries: 5 - # start_period: 15s - # hostname: postgres - # labels: - # "docker_compose_diagram.cluster": "Internal Network" - # "docker_compose_diagram.description": "Tagbase PostgreSQL server" - # networks: - # - internal-network - # ports: - # - ${POSTGRES_PORT}:${POSTGRES_PORT} - # restart: unless-stopped - # volumes: - # - ./postgres-data:/var/lib/postgresql/data postgis: environment: - ALLOW_IP_RANGE=0.0.0.0/0 + - EXTRA_CONF=log_destination = stderr\nlogging_collector = on - PASSWORD_AUTHENTICATION=md5 - - POSTGRES_DB=tagbase + #- POSTGRES_DB=tagbase - POSTGRES_PASS=${POSTGRES_PASSWORD} - POSTGRES_USER=tagbase - POSTGRES_MULTIPLE_EXTENSIONS=postgis,hstore,postgis_topology,postgis_raster,pgrouting @@ -135,6 +113,53 @@ services: - ./dbbackups:/backups - ./postgis-data:/var/lib/postgresql - ./services/postgis/tagbase_schema.sql:/docker-entrypoint-initdb.d/tagbase_schema.sql + rabbitmq: + depends_on: + postgis: + condition: service_healthy + healthcheck: + test: "set -eo pipefail rabbitmqctl eval '{ true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }, { [], no_alarms } = { rabbit:alarms(), no_alarms }, [] /= rabbit_networking:active_listeners(), rabbitmq_node_is_healthy.' 
|| exit 1" + interval: 15s + timeout: 5s + retries: 5 + start_period: 15s + hostname: rabbitmq + image: rabbitmq:3-management-alpine + labels: + "docker_compose_diagram.cluster": "Internal Network" + "docker_compose_diagram.description": "rabbitmq messaging service" + "docker_compose_diagram.icon": "rabbitmq" + networks: + - internal-network + ports: + - 5672:5672 + - 15672:15672 + restart: unless-stopped + volumes: + - ./rabbitmq/data/:/var/lib/rabbitmq/ + - ./logs/rabbitmq/:/var/log/rabbitmq/ + rabbitmq_subscriber: + build: + context: ./services/subscriber + depends_on: + rabbitmq: + condition: service_healthy + environment: + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} + - POSTGRES_PORT=${POSTGRES_PORT} + hostname: rabbitmq_subscriber + labels: + "docker_compose_diagram.cluster": "Internal Network" + "docker_compose_diagram.description": "rabbitmq subscriber service" + "docker_compose_diagram.icon": "rabbitmq" + links: + - postgis + - rabbitmq + networks: + - internal-network + restart: unless-stopped + volumes: + - ./logs/rabbitmq_subscriber:/usr/src/app/logs/rabbitmq_subscriber slack_docker: environment: - webhook=${webhook} @@ -166,10 +191,11 @@ services: "docker_compose_diagram.icon": "flask" links: - postgis + - rabbitmq networks: - internal-network restart: unless-stopped volumes: - - ./logs:/usr/src/app/logs + - ./logs/tagbase_server:/usr/src/app/logs/tagbase_server networks: internal-network: \ No newline at end of file diff --git a/openapi.yaml b/openapi.yaml index 3e4a4c2..7e14e92 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -7,7 +7,7 @@ info: description: | tagbse-server provides HTTP endpoints for ingestion of various files \ into a Tagbase SQL database. Input file support currently includes eTUFF (see [here](https://doi.org/10.6084/m9.figshare.10032848.v4) \ - and [here](https://doi.org/10.6084/m9.figshare.10159820.v1)). The REST API complies with [OpenAPI v3.0.3](https://spec.openapis.org/oas/v3.0.3.html). + and [here](https://doi.org/10.6084/m9.figshare.10159820.v1)). license: name: Apache License v2.0 url: https://www.apache.org/licenses/LICENSE-2.0 @@ -20,11 +20,80 @@ servers: - description: ICCAT Test tagbase-server url: https://162.13.162.49/tagbase/api/v0.7.0 tags: -- description: Ingestion operations. +- description: Event Operations + name: events +- description: Ingestion operations name: ingest -- description: Tag Operations. +- description: Tag Operations name: tags paths: + /events: + get: + description: Get information about all events + operationId: list_all_events + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: A list containing all events. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Get information about all events + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + /events/{event_id}: + get: + description: Get information about an individual event + operationId: get_event + parameters: + - $ref: '#/components/parameters/event_id' + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/event200' + description: Information about an individual tag + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. 
+ summary: Get information about an individual event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + put: + description: Update notes for an event + operationId: put_event + parameters: + - $ref: '#/components/parameters/event_id' + - $ref: '#/components/parameters/notes' + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/eventPut200' + description: A success message confirming ingestion. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Update the 'notes' associated with an event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller /ingest: get: description: Get network accessible file and execute ingestion @@ -50,13 +119,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get network accessible file and execute ingestion tags: @@ -93,13 +162,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Post a local file and perform a ingest operation tags: @@ -115,13 +184,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tags200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tags200' + description: A list of all tags. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about all tags tags: @@ -138,13 +207,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tag200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tag200' + description: Information about an individual tag. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about an individual tag tags: @@ -164,20 +233,52 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TagPut200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tagPut200' + description: Message confirming successful data update "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' - description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. 
Contact admin detailed in openapi.yaml summary: Update the 'notes' and/or 'version' associated with a tag submission tags: - tags x-openapi-router-controller: tagbase_server.controllers.tags_controller + /tags/{tag_id}/subs/{sub_id}/events: + get: + description: Get all events for a given tag submission + operationId: list_events + parameters: + - $ref: '#/components/parameters/tagId' + - $ref: '#/components/parameters/subId' + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: Message confirming successful data update + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml + summary: Get all events for a given tag submission + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller components: parameters: + event_id: + description: Event UUID + explode: true + in: path + name: event_id + required: true + schema: + $ref: '#/components/schemas/event_id' filename: description: Free-form text field to explicitly define the name of the file to be persisted explode: true @@ -203,7 +304,7 @@ components: type: string style: form subId: - description: Existing submission id for an existing tag + description: Numeric submission ID explode: true in: path name: sub_id @@ -211,7 +312,7 @@ components: schema: type: number tagId: - description: Existing tag id + description: Numeric tag ID explode: true in: path name: tag_id @@ -249,13 +350,132 @@ components: example: "eTUFF-sailfish-117259_2.txt" title: filename type: string - Ingest200: + event200: + description: Information for a given event + example: + event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" + tag_id: 1 + submission_id: 1 + event_category: "submission" + event_name: "new tag submission" + time_start: "2022-04-01T04:58:21.319061+00:00" + time_end: "2022-04-01T04:59:21.319061+00:00" + duration: "0:01:00" + event_status: "finished" + event_notes: "Some user defined notes..." + properties: + event_category: + description: ... + example: "..." + type: string + event_id: + $ref: '#/components/schemas/event_id' + event_name: + description: ... + example: "..." + type: string + event_notes: + description: Free-form text field where details of the event can be optionally entered by the client + example: "The event represents a data anomaly with file XYZ because of ..., this has been fixed in version ABC." + type: string + event_status: + description: Free-form text field where details of the event can be optionally entered by the client + enum: + - failed + - finished + - killed + - running + example: failed + type: string + time_start: + description: Local datetime stamp at the time of the event start + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + type: string + time_end: + description: Local datetime stamp at the time of the event end + example: '2022-04-01T04:58:21.319061+00:00' + #format: date + #pattern: '^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?(([+-]\d\d:\d\d)|Z)?$' + type: string + duration: + description: The event duration e.g. 
different between 'time_start' and 'time_end' + example: "0:01:00" + type: string + submission_id: + $ref: '#/components/schemas/submission_id' + tag_id: + $ref: '#/components/schemas/tag_id' + title: event200 + type: object + eventPut200: + description: Event update response + example: + code: "200" + message: Notes for event '1' successfully updated. + properties: + code: + description: HTTP status code + example: "200" + type: string + message: + description: A string detailing specifics of the HTTP operation + example: Notes for event '1' successfully updated. + type: string + title: eventPut200 + type: object + event_id: + description: UUID associated with a particular event + example: "06335e84-2872-4914-8c5d-3ed07d2a2f16" + format: uuid + title: event_id + type: string + events200: + description: Response detailing all available events + example: + count: 3 + events: + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2f16" + tag_id: 1 + submission_id: 1 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" + tag_id: 1 + submission_id: 1 + - event_id: "06335e84-2872-4914-8c5d-3ed07d2a2fkf" + tag_id: 1 + submission_id: 1 + properties: + count: + description: Total count of unique events + example: 3 + type: integer + events: + description: List of event UUID's and associated tag and submission IDs + type: array + items: + description: UUID and associated tag and submission ID + example: + event_id: "06335e84-2872-4914-8c5d-3ed07d2a2dj4" + tag_id: 1 + submission_id: 1 + properties: + event_id: + $ref: '#/components/schemas/event_id' + tag_id: + $ref: '#/components/schemas/tag_id' + submission_id: + $ref: '#/components/schemas/submission_id' + type: object + title: tags200 + type: object + ingest200: description: HTTP 200 success response example: code: "200" elapsed: '0:00:06.506691' message: Data file eTUFF-sailfish-117259.txt successfully ingested into Tagbase - DB. + DB properties: code: description: HTTP status code @@ -267,11 +487,11 @@ components: message: description: A string detailing specifics of an HTTP operation example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. + Tagbase DB type: string - title: Ingest200 + title: ingest200 type: object - Response500: + response500: description: 500 Internal Server Error example: code: "200" @@ -286,19 +506,24 @@ components: message: description: A string detailing specifics of the HTTP 500 response example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. + Tagbase DB type: string more_info: - description: Additional details (if available) to diagnose the 500 response. + description: Additional details (if available) to diagnose the 500 response example: https://httpwg.org/specs/rfc7231.html#status.500 type: string trace: description: Trace diagnostic information related to the response example: 123e4567-e89b-12d3-a456-426614174000 type: string - title: Response500 + title: response500 type: object - Tag200: + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + tag200: description: Information for a given tag example: tag: @@ -328,7 +553,7 @@ components: version: '2' properties: tag: - description: List containing one or more submissions for a given tag + description: List containing submissions for a given tag items: type: object properties: @@ -356,21 +581,17 @@ components: example: "Ingested by admin on XXXX-XX-XX to back-process campaign XYZ." 
type: string submission_id: - description: Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase - example: 5 - type: integer + $ref: '#/components/schemas/submission_id' tag_id: - description: Unique numeric Tag ID associated with the ingested tag eTUFF data file - example: 3 - type: integer + $ref: '#/components/schemas/tag_id' version: description: Version identifier for the eTUFF tag data file ingested example: "1" type: string type: array - title: Tag200 + title: tag200 type: object - TagPut200: + tagPut200: description: HTTP 200 success response example: code: "200" @@ -384,9 +605,14 @@ components: description: A string detailing specifics of an HTTP operation example: Tag '1' submission '5' successfully updated. type: string - title: TagPut200 + title: tagPut200 type: object - Tags200: + tag_id: + description: Unique numeric tag ID associated with the ingested tag data file + example: 1 + title: tag_id + type: integer + tags200: description: Response detailing all available unique tags and associated filename example: count: 2 @@ -410,13 +636,9 @@ components: filename: eTUFF-sailfish-117259_2.txt properties: tag_id: - description: Unique numeric Tag ID associated with the ingested tag data file - example: 1 - type: integer + $ref: '#/components/schemas/tag_id' filename: - description: Full name and extension of the ingested eTUFF tag data file - example: eTUFF-sailfish-117259.txt - type: string + $ref: '#/components/schemas/filename' type: object - title: Tags200 + title: tags200 type: object diff --git a/services/postgis/tagbase_schema.sql b/services/postgis/tagbase_schema.sql index ce27ba4..868ee4c 100644 --- a/services/postgis/tagbase_schema.sql +++ b/services/postgis/tagbase_schema.sql @@ -24,492 +24,233 @@ SET default_table_access_method = heap; SET default_with_oids = false; --- --- Name: data_histogram_bin_data; Type: TABLE; Schema: public; Owner: postgres --- - -CREATE TABLE data_histogram_bin_data ( - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, - bin_id bigint NOT NULL, - bin_class integer NOT NULL, - date_time timestamp(6) with time zone NOT NULL, - variable_value character varying(30) NOT NULL, - position_date_time timestamp(6) with time zone, - variable_id bigint NOT NULL +CREATE TYPE status_enum AS ENUM ('failed', 'finished', 'killed', 'running'); + +CREATE TABLE events_log ( + submission_id integer, + tag_id integer, + event_id UUID NOT NULL, + event_category character varying(30) NOT NULL, + event_name character varying(30) NOT NULL, + time_start timestamp(6) with time zone NOT NULL, + time_end timestamp(6) with time zone, + duration double precision, + event_status status_enum NOT NULL, + event_notes text ); +ALTER TABLE events_log OWNER TO postgres; -ALTER TABLE data_histogram_bin_data OWNER TO postgres; +COMMENT ON TABLE events_log IS 'Contains a persistent events register for capturing application behaviour, usage, statuses and anomalies pertaining to data. Capture, in an easily extensible form, key discrete database events relating to the ingestion, migration, summarization (and possibly external connection, querying, and usage). 
This includes records of event type, timing, and status/outcome pertaining to a given tag dataset and submission.'; --- --- Name: TABLE data_histogram_bin_data; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN events_log.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; -COMMENT ON TABLE data_histogram_bin_data IS 'Contains the frequency for corresponding summary data binning schemes (migrated from proc_observations)'; +COMMENT ON COLUMN events_log.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; +COMMENT ON COLUMN events_log.event_id IS 'UUID associated with the persisted event'; --- --- Name: COLUMN data_histogram_bin_data.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN events_log.event_category IS 'A specific phase within the Tagbase lifecycle e.g. submission, ingestion, migration, reporting.'; -COMMENT ON COLUMN data_histogram_bin_data.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; +COMMENT ON COLUMN events_log.event_name IS 'Name tying the event category to the actual event characteristics'; +COMMENT ON COLUMN events_log.time_start IS 'Event start time'; --- --- Name: COLUMN data_histogram_bin_data.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN events_log.time_end IS 'Event end time'; -COMMENT ON COLUMN data_histogram_bin_data.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; +COMMENT ON COLUMN events_log.duration IS 'Event duration'; +COMMENT ON COLUMN events_log.event_status IS 'Enumerated event status'; --- --- Name: COLUMN data_histogram_bin_data.bin_id; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN data_histogram_bin_data.bin_id IS 'Unique bin ID for the summary bin-frequency class'; +COMMENT ON COLUMN events_log.event_notes IS 'User-defined notes to better describe an event'; +CREATE TABLE data_histogram_bin_data ( + submission_id integer NOT NULL, + tag_id integer NOT NULL, + bin_id integer NOT NULL, + bin_class integer NOT NULL, + date_time timestamp(6) with time zone NOT NULL, + variable_value character varying(30) NOT NULL, + position_date_time timestamp(6) with time zone, + variable_id integer NOT NULL +); --- --- Name: COLUMN data_histogram_bin_data.bin_class; Type: COMMENT; Schema: public; Owner: postgres --- +ALTER TABLE data_histogram_bin_data OWNER TO postgres; -COMMENT ON COLUMN data_histogram_bin_data.bin_class IS 'Sequential numeric bin class identifier related to either Depth or Temperature. Usually there are 12 (1-12) bin ranges (Min and Max Depth or Temperature respectively), however there are times the bin ranges will not be 12, but instead 14 or 16. 
The larger the number, the more recent the tag models are from tag manufacturers, as they make more bytes available for storage.'; +COMMENT ON TABLE data_histogram_bin_data IS 'Contains the frequency for corresponding summary data binning schemes (migrated from proc_observations)'; +COMMENT ON COLUMN data_histogram_bin_data.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; --- --- Name: COLUMN data_histogram_bin_data.date_time; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN data_histogram_bin_data.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; -COMMENT ON COLUMN data_histogram_bin_data.date_time IS 'Date/time stamp of the tag summarized bin-frequency data record'; +COMMENT ON COLUMN data_histogram_bin_data.bin_id IS 'Unique bin ID for the summary bin-frequency class'; +COMMENT ON COLUMN data_histogram_bin_data.bin_class IS 'Sequential numeric bin class identifier related to either Depth or Temperature. Usually there are 12 (1-12) bin ranges (Min and Max Depth or Temperature respectively), however there are times the bin ranges will not be 12, but instead 14 or 16. The larger the number, the more recent the tag models are from tag manufacturers, as they make more bytes available for storage.'; --- --- Name: COLUMN data_histogram_bin_data.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN data_histogram_bin_data.date_time IS 'Date/time stamp of the tag summarized bin-frequency data record'; COMMENT ON COLUMN data_histogram_bin_data.variable_value IS 'Aggregate measure for the given bin-interval of the geophysical value of the observed tag variable record'; - --- --- Name: COLUMN data_histogram_bin_data.position_date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_data.position_date_time IS 'Date/time stamp of nearest matched associated positional record'; - --- --- Name: COLUMN data_histogram_bin_data.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_data.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table. 
Note that records in this table are NOT expected to be equivalent to those in the variable_id column of the data_histogram_bin_info table'; - --- --- Name: data_histogram_bin_info; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_histogram_bin_info ( - bin_id bigint NOT NULL, + bin_id integer NOT NULL, bin_class integer NOT NULL, min_value character varying(30) NOT NULL, max_value character varying(30) NOT NULL, - variable_id bigint NOT NULL + variable_id integer NOT NULL ); - ALTER TABLE data_histogram_bin_info OWNER TO postgres; --- --- Name: TABLE data_histogram_bin_info; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_histogram_bin_info IS 'Contains definitions of binning schemes for summary tag data (migrated from proc_observations)'; - --- --- Name: COLUMN data_histogram_bin_info.bin_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.bin_id IS 'Unique bin ID for the summary bin-frequency class'; - --- --- Name: COLUMN data_histogram_bin_info.bin_class; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.bin_class IS 'Sequential numeric bin class identifier related to either Depth or Temperature. Usually there are 12 (1-12) bin ranges (Min and Max Depth or Temperature respectively), however there are times the bin ranges will not be 12, but instead 14 or 16. The larger the number, the more recent the tag models are from tag manufacturers, as they make more bytes available for storage.'; - --- --- Name: COLUMN data_histogram_bin_info.min_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.min_value IS 'Value of minimum/lower bound of bin interval'; - --- --- Name: COLUMN data_histogram_bin_info.max_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.max_value IS 'Value of maximum/upper bound of bin interval'; - --- --- Name: COLUMN data_histogram_bin_info.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_histogram_bin_info.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table. 
Note that records in this table are NOT expected to be equivalent to those in the variable_id column of the data_histogram_bin_data table'; - --- --- Name: data_position; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_position ( date_time timestamp(6) with time zone NOT NULL, lat character varying(30) NOT NULL, lon character varying(30) NOT NULL, lat_err character varying(30), lon_err character varying(30), - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, argos_location_class character varying(1), solution_id integer NOT NULL DEFAULT 1, flag_as_reference integer NOT NULL DEFAULT 0 ); - ALTER TABLE data_position OWNER TO postgres; --- --- Name: TABLE data_position; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_position IS 'Contains the tag positional data series with associated Lat/Lon error estimates where available (migrated from proc_observations)'; - --- --- Name: COLUMN data_position.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.date_time IS 'Date/time stamp of the tag positional data record'; - --- --- Name: COLUMN data_position.lat; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lat IS 'Latitude in decimal degrees of the positional data tag record'; - --- --- Name: COLUMN data_position.lon; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lon IS 'Longitude in decimal degrees of the positional data tag record'; - --- --- Name: COLUMN data_position.lat_err; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lat_err IS 'Error associated with the tag record Latitudinal positional estimate'; - --- --- Name: COLUMN data_position.lon_err; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.lon_err IS 'Error associated with the tag record Longitudinal positional estimate'; - --- --- Name: COLUMN data_position.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN data_position.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN data_position.argos_location_class; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.argos_location_class IS 'ARGOS Location Class code (G,3,2,1,0,A,B,Z) https://www.argos-system.org/wp-content/uploads/2016/08/r363_9_argos_users_manual-v1.6.6.pdf , page 13.'; - --- --- Name: COLUMN data_position.solution_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.solution_id IS 'Unique numeric identifier for a given tag geolocation dataset solution. solution_id=1 is assigned to the primary or approved solution. 
Incremented solution_id''s assigned to other positional dataset solutions for a given tag_id and submission_id'; - --- --- Name: COLUMN data_position.flag_as_reference; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_position.flag_as_reference IS 'Integer (representing psudo boolean value) flag field which identifies whether positional data associated with a given Tag and Track solution are considered to be coordinates of the "Reference" track (ie. best solution currently). Coordinate record takes 1 if it is part of the Reference track or 0 if it is not.'; --- --- Name: data_profile; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_profile ( - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, - variable_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, + variable_id integer NOT NULL, date_time timestamp(6) with time zone NOT NULL, depth character varying(30) NOT NULL, variable_value character varying(30) DEFAULT '', position_date_time timestamp(6) with time zone ); - ALTER TABLE data_profile OWNER TO postgres; --- --- Name: TABLE data_profile; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_profile IS 'Contains the summarized bin profile tag observations (migrated from proc_observations)'; - --- --- Name: COLUMN data_profile.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN data_profile.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN data_profile.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. 
The variable_id is based on observation or measurement variables listed in the observation_types table'; - --- --- Name: COLUMN data_profile.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.date_time IS 'Date/time stamp of the tag data record'; - --- --- Name: COLUMN data_profile.depth; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.depth IS 'Depth of the tag data record'; - --- --- Name: COLUMN data_profile.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.variable_value IS 'Geophysical value of the observed tag variable record'; - --- --- Name: COLUMN data_profile.position_date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_profile.position_date_time IS 'Date/time stamp of nearest matched associated positional record'; - --- --- Name: data_time_series; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE data_time_series ( date_time timestamp(6) with time zone NOT NULL, - variable_id bigint NOT NULL, + variable_id integer NOT NULL, variable_value character varying(30) NOT NULL, - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, position_date_time timestamp(6) with time zone ); - ALTER TABLE data_time_series OWNER TO postgres; --- --- Name: TABLE data_time_series; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE data_time_series IS 'Contains the continuous measurement archival time series of tag geophysical measurements (migrated from proc_observations)'; - --- --- Name: COLUMN data_time_series.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.date_time IS 'Date/time stamp of the tag data record'; - --- --- Name: COLUMN data_time_series.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. 
The variable_id is based on observation or measurement variables listed in the observation_types table'; - --- --- Name: COLUMN data_time_series.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.variable_value IS 'Geophysical value of the observed tag variable record'; - --- --- Name: COLUMN data_time_series.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN data_time_series.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN data_time_series.position_date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN data_time_series.position_date_time IS 'Date/time stamp of nearest matched associated positional record'; - --- --- Name: metadata; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE metadata ( - submission_id bigint NOT NULL, - attribute_id bigint NOT NULL, + submission_id integer NOT NULL, + attribute_id integer NOT NULL, attribute_value text NOT NULL, - tag_id bigint NOT NULL + tag_id integer NOT NULL ); - ALTER TABLE metadata OWNER TO postgres; --- --- Name: TABLE metadata; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE metadata IS 'Contains the ingested tag metadata consistent with the eTUFF specification'; - --- --- Name: COLUMN metadata.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN metadata.attribute_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.attribute_id IS 'Unique numeric metadata attribute ID based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata.attribute_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.attribute_value IS 'Value associated with the given eTUFF metadata attribute'; - --- --- Name: COLUMN metadata.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: metadata_position; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE metadata_position ( - submission_id bigint NOT NULL, - attribute_id bigint NOT NULL, + submission_id integer NOT NULL, + attribute_id integer NOT NULL, attribute_value text NOT NULL, - tag_id bigint NOT NULL, + tag_id integer NOT NULL, solution_id integer NOT NULL DEFAULT 1 ); - ALTER TABLE metadata_position OWNER TO postgres; --- --- Name: TABLE metadata_position; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE metadata_position IS 'Contains the ingested tag metadata consistent with the eTUFF specification'; - --- --- Name: COLUMN metadata_position.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN metadata_position.attribute_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.attribute_id IS 'Unique numeric 
metadata attribute ID based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_position.attribute_value; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.attribute_value IS 'Value associated with the given eTUFF metadata attribute'; - --- --- Name: COLUMN metadata_position.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; - --- --- Name: COLUMN metadata_position.solution_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_position.solution_id IS 'Unique numeric identifier for a given tag geolocation dataset solution. solution_id=1 is assigned to the primary or approved solution. Incremented solution_id''s assigned to other positional dataset solutions for a given tag_id and submission_id'; - --- --- Name: metadata_types; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE metadata_types ( - attribute_id bigint NOT NULL, + attribute_id integer NOT NULL, category character varying(1024) NOT NULL, attribute_name character varying(1024) NOT NULL, description text NOT NULL, @@ -518,71 +259,26 @@ CREATE TABLE metadata_types ( necessity character varying(1024) NOT NULL ); - ALTER TABLE metadata_types OWNER TO postgres; --- --- Name: TABLE metadata_types; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE metadata_types IS 'Contains descriptive information on tag metadata based on the eTUFF specification'; - --- --- Name: COLUMN metadata_types.attribute_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.attribute_id IS 'Unique numeric metadata attribute ID based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.category; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.category IS 'Metadata attribute category or group based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.attribute_name; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.attribute_name IS 'Name of metadata attribute based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.description; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.description IS 'Description of metadata attribute based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.example; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.example IS 'Example value of metadata attribute on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.comments; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.comments IS 'Comments or notes relating to the metadata attribute based on the eTUFF metadata specification'; - --- --- Name: COLUMN metadata_types.necessity; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN metadata_types.necessity IS 'Designation of the metadata attribute as Required, Recommended, or Optional based on the eTUFF metadata specification'; - --- --- Name: observation_types; Type: TABLE; Schema: public; Owner: postgres --- - CREATE TABLE observation_types ( - variable_id bigint NOT NULL, + variable_id integer NOT NULL, variable_name character varying(255) NOT NULL, standard_name character varying(255), variable_source character varying(255), @@ -591,69 +287,24 @@ 
CREATE TABLE observation_types ( standard_unit character varying(255) ); - ALTER TABLE observation_types OWNER TO postgres; --- --- Name: TABLE observation_types; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE observation_types IS 'Contains listings and descriptions of observation variable types based on the eTUFF specification'; - --- --- Name: COLUMN observation_types.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.variable_id IS 'Unique variable identifier based on the eTUFF tag data file specification'; - --- --- Name: COLUMN observation_types.variable_name; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.variable_name IS 'Variable name based on the eTUFF tag data file specification'; - --- --- Name: COLUMN observation_types.standard_name; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.standard_name IS 'CF Standard name for observation variable, if available'; - --- --- Name: COLUMN observation_types.variable_source; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.variable_source IS 'Source authority for the given variables'; - --- --- Name: COLUMN observation_types.variable_units; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.variable_units IS 'Units of the variable based on the eTUFF tag data file specification'; - --- --- Name: COLUMN observation_types.notes; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.notes IS 'Notes or comments relating to the variable'; - --- --- Name: COLUMN observation_types.standard_unit; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN observation_types.standard_unit IS 'CF canonical standard unit for observation variable, if available'; - --- --- Name: observation_types_variable_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres --- - CREATE SEQUENCE observation_types_variable_id_seq START WITH 1 INCREMENT BY 1 @@ -661,142 +312,57 @@ CREATE SEQUENCE observation_types_variable_id_seq NO MAXVALUE CACHE 1; - ALTER TABLE observation_types_variable_id_seq OWNER TO postgres; --- --- Name: observation_types_variable_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres --- - ALTER SEQUENCE observation_types_variable_id_seq OWNED BY observation_types.variable_id; - --- --- Name: proc_observations; Type: TABLE; Schema: public; Owner: postgres --- - CREATE UNLOGGED TABLE proc_observations ( date_time timestamp(6) with time zone NOT NULL, - variable_id bigint NOT NULL, + variable_id integer NOT NULL, variable_value character varying(30) NOT NULL, - submission_id bigint NOT NULL, - tag_id bigint NOT NULL + submission_id integer NOT NULL, + tag_id integer NOT NULL ); +ALTER TABLE proc_observations OWNER TO postgres; -ALTER TABLE proc_observations OWNER TO postgres; - --- --- Name: TABLE proc_observations; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON TABLE proc_observations IS 'Contains staged source tag eTUFF data imported into Tagbase'; - - --- --- Name: COLUMN proc_observations.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.date_time IS 'Date/time stamp of data record from source eTUFF file ingested'; - - --- --- Name: COLUMN proc_observations.variable_id; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.variable_id IS 'Unique variable 
identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table'; - - --- --- Name: COLUMN proc_observations.variable_value; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.variable_value IS 'Value of the given observation_type variable for the eTUFF data record'; - - --- --- Name: COLUMN proc_observations.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - -COMMENT ON COLUMN proc_observations.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; +COMMENT ON TABLE proc_observations IS 'Contains staged source tag eTUFF data imported into Tagbase'; +COMMENT ON COLUMN proc_observations.date_time IS 'Date/time stamp of data record from source eTUFF file ingested'; --- --- Name: COLUMN proc_observations.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- +COMMENT ON COLUMN proc_observations.variable_id IS 'Unique variable identifier for the data record from the source eTUFF file ingested. The variable_id is based on observation or measurement variables listed in the observation_types table'; -COMMENT ON COLUMN proc_observations.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; +COMMENT ON COLUMN proc_observations.variable_value IS 'Value of the given observation_type variable for the eTUFF data record'; +COMMENT ON COLUMN proc_observations.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; --- --- Name: submission; Type: TABLE; Schema: public; Owner: postgres --- +COMMENT ON COLUMN proc_observations.tag_id IS 'Unique numeric Tag ID associated with the ingested tag data file'; CREATE TABLE submission ( - submission_id bigint NOT NULL, - tag_id bigint NOT NULL, + submission_id integer NOT NULL, + tag_id integer NOT NULL, date_time timestamp(6) with time zone DEFAULT now() NOT NULL, filename text NOT NULL, version character varying(50), notes text ); - ALTER TABLE submission OWNER TO postgres; --- --- Name: TABLE submission; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON TABLE submission IS 'Contains information on source tag eTUFF files submitted for ingest into Tagbase'; - --- --- Name: COLUMN submission.submission_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.submission_id IS 'Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase'; - --- --- Name: COLUMN submission.tag_id; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.tag_id IS 'Unique numeric Tag ID associated with the ingested tag eTUFF data file'; - --- --- Name: COLUMN submission.date_time; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.date_time IS 'Local datetime stamp at the time of eTUFF tag data file ingestion'; - --- --- Name: COLUMN submission.filename; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.filename IS 'Full path, name and extension of the ingested eTUFF tag data file'; - --- --- Name: COLUMN submission.version; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN submission.version IS 'Version identifier for the eTUFF tag data file ingested'; - --- --- Name: COLUMN submission.notes; Type: COMMENT; Schema: public; Owner: postgres --- - COMMENT ON COLUMN 
submission.notes IS 'Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solutionpositional meta/data)'; - --- --- Name: submission_submission_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres --- - CREATE SEQUENCE submission_submission_id_seq START WITH 1 INCREMENT BY 1 @@ -804,20 +370,10 @@ CREATE SEQUENCE submission_submission_id_seq NO MAXVALUE CACHE 1; - ALTER TABLE submission_submission_id_seq OWNER TO postgres; --- --- Name: submission_submission_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres --- - ALTER SEQUENCE submission_submission_id_seq OWNED BY submission.submission_id; - --- --- Name: submission_tag_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres --- - CREATE SEQUENCE submission_tag_id_seq START WITH 1 INCREMENT BY 1 @@ -825,401 +381,149 @@ CREATE SEQUENCE submission_tag_id_seq NO MAXVALUE CACHE 1; - ALTER TABLE submission_tag_id_seq OWNER TO postgres; ALTER SEQUENCE submission_tag_id_seq OWNED BY submission.tag_id; --- --- Name: observation_types variable_id; Type: DEFAULT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY observation_types ALTER COLUMN variable_id SET DEFAULT nextval('observation_types_variable_id_seq'::regclass); - --- --- Name: submission submission_id; Type: DEFAULT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY submission ALTER COLUMN submission_id SET DEFAULT nextval('submission_submission_id_seq'::regclass); - --- --- Data for Name: data_histogram_bin_data; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_histogram_bin_data (submission_id, tag_id, bin_id, bin_class, date_time, variable_value, position_date_time, variable_id) FROM stdin; \. - --- --- Data for Name: data_histogram_bin_info; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_histogram_bin_info (bin_id, bin_class, min_value, max_value, variable_id) FROM stdin; \. - --- --- Data for Name: data_position; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_position (date_time, lat, lon, lat_err, lon_err, submission_id, tag_id, argos_location_class, solution_id) FROM stdin; \. - --- --- Data for Name: data_profile; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_profile (submission_id, tag_id, variable_id, date_time, depth, variable_value, position_date_time) FROM stdin; \. - --- --- Data for Name: data_time_series; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY data_time_series (date_time, variable_id, variable_value, submission_id, tag_id, position_date_time) FROM stdin; \. - --- --- Data for Name: metadata; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY metadata (submission_id, attribute_id, attribute_value, tag_id) FROM stdin; \. - --- --- Data for Name: metadata_position; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY metadata_position (submission_id, attribute_id, attribute_value, tag_id, solution_id) FROM stdin; \. - --- --- Data for Name: proc_observations; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY proc_observations (date_time, variable_id, variable_value, submission_id, tag_id) FROM stdin; \. - --- --- Data for Name: submission; Type: TABLE DATA; Schema: public; Owner: postgres --- - COPY submission (submission_id, tag_id, date_time, filename, version, notes) FROM stdin; \. 
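-- ---------------------------------------------------------------------------
-- Illustrative sketch only (not part of the generated dump): how a row in the
-- new events_log table might be written and inspected once an ingestion event
-- has completed. The UUID, timestamps and notes below are hypothetical values,
-- and duration is assumed here to be expressed in seconds.
-- ---------------------------------------------------------------------------
INSERT INTO events_log
    (submission_id, tag_id, event_id, event_category, event_name,
     time_start, time_end, duration, event_status, event_notes)
VALUES
    (1, 1, '06335e84-2872-4914-8c5d-3ed07d2a2f16', 'ingestion',
     'etuff file ingest', '2022-04-01T04:58:21+00:00',
     '2022-04-01T04:59:21+00:00', 60.0, 'finished',
     'Illustrative event record only.');

-- Inspect the recorded event and its elapsed wall-clock time.
SELECT event_id, event_status, time_end - time_start AS elapsed
FROM events_log
WHERE tag_id = 1 AND submission_id = 1;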
- --- --- Name: observation_types_variable_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres --- - SELECT pg_catalog.setval('observation_types_variable_id_seq', 1, false); - --- --- Name: submission_submission_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres --- - SELECT pg_catalog.setval('submission_submission_id_seq', 1, false); - --- --- Name: submission_tag_id_seq; Type: SEQUENCE SET; Schema: public; Owner: postgres --- - SELECT pg_catalog.setval('submission_tag_id_seq', 1, false); - --- --- Name: data_histogram_bin_data data_histogram_bin_data_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT data_histogram_bin_data_pkey PRIMARY KEY (submission_id, tag_id, variable_id, bin_id, bin_class, date_time) WITH (fillfactor='100'); - --- --- Name: data_histogram_bin_info data_histogram_bin_info_bin_id_bin_class_key; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_info ADD CONSTRAINT data_histogram_bin_info_bin_id_bin_class_key UNIQUE (bin_id, bin_class); - --- --- Name: data_histogram_bin_info data_histogram_bin_info_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_info ADD CONSTRAINT data_histogram_bin_info_pkey PRIMARY KEY (variable_id, bin_id, bin_class) WITH (fillfactor='100'); - --- --- Name: data_position data_position_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_position ADD CONSTRAINT data_position_pkey PRIMARY KEY (submission_id, tag_id, solution_id, date_time) WITH (fillfactor='100'); - --- --- Name: data_profile data_profile_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_profile ADD CONSTRAINT data_profile_pkey PRIMARY KEY (submission_id, tag_id, date_time, depth, variable_id) WITH (fillfactor='100'); - --- --- Name: data_time_series data_time_series_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_time_series ADD CONSTRAINT data_time_series_pkey PRIMARY KEY (submission_id, tag_id, variable_id, date_time) WITH (fillfactor='100'); - --- --- Name: metadata metadata_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata ADD CONSTRAINT metadata_pkey PRIMARY KEY (submission_id, attribute_id); - --- --- Name: metadata_position metadata_pkey01; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_position ADD CONSTRAINT metadata_pkey01 PRIMARY KEY (submission_id, attribute_id, tag_id, solution_id) WITH (fillfactor='100'); - --- --- Name: metadata_types metadata_types_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_types ADD CONSTRAINT metadata_types_pkey PRIMARY KEY (attribute_id); - --- --- Name: observation_types observation_types_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY observation_types ADD CONSTRAINT observation_types_pkey PRIMARY KEY (variable_id); - --- --- Name: observation_types observation_types_variable_name_key; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY observation_types ADD CONSTRAINT observation_types_variable_name_key UNIQUE (variable_name); - --- --- Name: proc_observations proc_observations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_pkey PRIMARY KEY (date_time, variable_id, submission_id); - --- --- Name: submission submission_pkey; 
Type: CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY submission ADD CONSTRAINT submission_pkey PRIMARY KEY (submission_id); - --- --- Name: data_histogram_bin_data_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_histogram_bin_data_date_time_index ON data_histogram_bin_data USING btree (date_time); - --- --- Name: data_histogram_bin_data_pos_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_histogram_bin_data_pos_date_time_index ON data_histogram_bin_data USING btree (position_date_time); - --- --- Name: data_position_date_time; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_position_date_time ON data_position USING btree (date_time); - --- --- Name: data_position_latlontime_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_position_latlontime_index ON data_position USING btree (submission_id, tag_id, solution_id, date_time, lat, lon, argos_location_class) WITH (fillfactor='100'); - --- --- Name: data_profile_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_profile_date_time_index ON data_profile USING btree (date_time); - --- --- Name: data_profile_pos_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_profile_pos_date_time_index ON data_profile USING btree (position_date_time); - --- --- Name: data_time_series_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_time_series_date_time_index ON data_time_series USING btree (date_time); - --- --- Name: data_time_series_pos_date_time_index; Type: INDEX; Schema: public; Owner: postgres --- - CREATE INDEX data_time_series_pos_date_time_index ON data_time_series USING btree (position_date_time); - --- --- Name: data_histogram_bin_data data_histogram_bin_data_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT data_histogram_bin_data_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: data_histogram_bin_info data_histogram_bin_info; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_info ADD CONSTRAINT data_histogram_bin_info FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- Name: data_position data_position_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_position ADD CONSTRAINT data_position_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: data_time_series data_time_series_data_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_time_series ADD CONSTRAINT data_time_series_data_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); - --- --- Name: data_time_series data_time_series_variable_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_time_series ADD CONSTRAINT data_time_series_variable_id_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- Name: data_histogram_bin_data datahistogrambindata_observationtypes_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT datahistogrambindata_observationtypes_fkey FOREIGN KEY (variable_id) REFERENCES 
observation_types(variable_id); - --- --- Name: data_profile dataprofile_observationtypes_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_profile ADD CONSTRAINT dataprofile_observationtypes_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- Name: data_profile dataprofile_submission_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_profile ADD CONSTRAINT dataprofile_submission_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); - --- --- Name: data_histogram_bin_data histogrambindata_histogrambininfo_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY data_histogram_bin_data ADD CONSTRAINT histogrambindata_histogrambininfo_fkey FOREIGN KEY (bin_id, bin_class) REFERENCES data_histogram_bin_info(bin_id, bin_class); - --- --- Name: metadata metadata_attribute_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata ADD CONSTRAINT metadata_attribute_id_fkey FOREIGN KEY (attribute_id) REFERENCES metadata_types(attribute_id); - --- --- Name: metadata_position metadata_attribute_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_position ADD CONSTRAINT metadata_attribute_id_fkey FOREIGN KEY (attribute_id) REFERENCES metadata_types(attribute_id); - --- --- Name: metadata metadata_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata ADD CONSTRAINT metadata_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: metadata_position metadata_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY metadata_position ADD CONSTRAINT metadata_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: proc_observations proc_observations_submission_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_submission_id_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id) ON DELETE CASCADE; - --- --- Name: proc_observations proc_observations_variable_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres --- - ALTER TABLE ONLY proc_observations ADD CONSTRAINT proc_observations_variable_id_fkey FOREIGN KEY (variable_id) REFERENCES observation_types(variable_id); - --- --- PostgreSQL database dump complete --- +-- ALTER TABLE ONLY events_log +-- ADD CONSTRAINT eventslog_submission_fkey FOREIGN KEY (submission_id) REFERENCES submission(submission_id); -- -- The following TRIGGER ensures that upon ingestion of an eTUFF file into tagbase-server, diff --git a/services/subscriber/Dockerfile b/services/subscriber/Dockerfile new file mode 100644 index 0000000..08a1f30 --- /dev/null +++ b/services/subscriber/Dockerfile @@ -0,0 +1,18 @@ +FROM python:slim-bullseye + +RUN mkdir -p /usr/src/app +WORKDIR /usr/src/app + +COPY requirements.txt /usr/src/app/ + +RUN apt update && \ + apt install -y --force-yes bash curl gcc iputils-ping && \ + apt clean all && \ + python3 -m pip install --upgrade pip && \ + python3 -m pip install -r requirements.txt --no-cache-dir + + +ADD rabbitmq_subscriber.py rabbitmq_subscriber.py +ADD db_utils.py db_utils.py + +CMD ["python3", "rabbitmq_subscriber.py"] diff --git a/services/subscriber/db_utils.py b/services/subscriber/db_utils.py new file mode 
100644 index 0000000..fc30f09 --- /dev/null +++ b/services/subscriber/db_utils.py @@ -0,0 +1,107 @@ +import logging +import os +import psycopg2 +import psycopg2.extras + +psycopg2.extras.register_uuid() +logger = logging.getLogger("rabbitmq_subscriber") + + +def connect(): + """ + Make and return a connection to TagbaseDB. This function also improves handling of Operational errors + if they occur. + :rtype: connection + """ + logger.debug("Attempting connection to TagbaseDB...") + try: + conn = psycopg2.connect( + dbname="tagbase", + user="tagbase", + host="postgis", + port=os.getenv("POSTGRES_PORT"), + password=os.getenv("POSTGRES_PASSWORD"), + ) + except psycopg2.OperationalError as poe: + logger.error("Unable to connect to the database") + return { + "code": "500", + "message": "Encountered psycopg2.OperationalError when attempting to establish a connection " + "to the Tagbase database.", + "more_info": "Contact the service administrator - {email}".format( + email=os.getenv("PGADMIN_DEFAULT_EMAIL") + ), + "trace": poe, + } + logger.debug("Successfully connected to TagbaseDB.") + return conn + + +def create_event( + event_category=None, + event_id=None, + event_name=None, + event_status=None, + time_start=None, +): + """ + Create a new event in the events_log table. Note the event_id UUID is not automatically generated. + It must be passed to this function call. + """ + logger.debug("Creating new event: %s in events log...", event_id) + event_conn = connect() + with event_conn: + with event_conn.cursor() as event_cur: + event_cur.execute( + "INSERT INTO events_log (event_id, event_category, event_name, time_start, event_status) " + "VALUES (%s, %s, %s, %s, %s)", + (event_id, event_category, event_name, time_start, event_status), + ) + logger.info( + "CREATED new event: '%s'", + str(event_id), + ) + event_conn.commit() + event_cur.close() + event_conn.close() + + +def update_event( + duration=None, + event_id=None, + event_status=None, + submission_id=None, + tag_id=None, + time_end=None, +): + """ + Update existing event in the events_log table with new data. 
+ """ + logger.debug( + "Updating event: '%s' in events log...", + event_id, + ) + conn = connect() + with conn: + with conn.cursor() as cur: + cur.execute( + "UPDATE events_log " + "SET submission_id = %s, tag_id = %s, event_id = %s, time_end = %s, duration = %s, event_status = %s" + " WHERE event_id = %s", + ( + submission_id, + tag_id, + event_id, + time_end, + duration, + event_status, + event_id, + ), + ) + logger.info( + "UPDATED event: '%s'", + str(event_id), + ) + conn.commit() + cur.close() + conn.close() diff --git a/services/subscriber/rabbitmq_subscriber.py b/services/subscriber/rabbitmq_subscriber.py new file mode 100644 index 0000000..4d9d4c6 --- /dev/null +++ b/services/subscriber/rabbitmq_subscriber.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +import logging +import os +import pika +import sys + +from logging.handlers import RotatingFileHandler + +LOGGER_NAME = "rabbitmq_subscriber" + +os.makedirs("./logs/{}".format(LOGGER_NAME), exist_ok=True) +logger = logging.getLogger(LOGGER_NAME) +if logger.hasHandlers(): + logger.handlers = [] +logger.setLevel(logging.INFO) + +formatter = logging.Formatter( + "%(asctime)s - %(filename)s:%(lineno)d - %(levelname)s - %(message)s" +) + +s_handler = logging.StreamHandler() +s_handler.setFormatter(formatter) +logger.addHandler(s_handler) + +rf_handler = RotatingFileHandler( + f"./logs/{LOGGER_NAME}/{LOGGER_NAME}_log.txt", + mode="a", + maxBytes=100000, + backupCount=10, +) +rf_handler.setFormatter(formatter) +logger.addHandler(rf_handler) + + +def process_topic(topic=None, msg_parts=None): + import db_utils + import uuid + + if topic == "events_log/create": + db_utils.create_event( + event_category=msg_parts[0], + event_id=uuid.UUID(msg_parts[1]), + event_name=msg_parts[2], + event_status=msg_parts[3], + time_start=msg_parts[4], + ) + else: + logger.info(msg_parts) + db_utils.update_event( + duration=msg_parts[0], + event_id=uuid.UUID(msg_parts[1]), + event_status=msg_parts[2], + submission_id=msg_parts[3], + tag_id=msg_parts[4], + time_end=msg_parts[5], + ) + + +def subscriber(): + connection = pika.BlockingConnection(pika.ConnectionParameters(host="rabbitmq")) + channel = connection.channel() + channel.queue_declare(queue="events_log") + + def callback(ch, method, properties, body): + logger.info("Received: %r" % body) + topic, messagedata = body.decode("utf-8").split(" ", 1) + process_topic(topic, messagedata.split(" ")) + + channel.basic_consume( + queue="events_log", on_message_callback=callback, auto_ack=True + ) + logger.info("Waiting for messages...") + channel.start_consuming() + + +if __name__ == "__main__": + subscriber() diff --git a/services/subscriber/requirements.txt b/services/subscriber/requirements.txt new file mode 100644 index 0000000..a8e6b2e --- /dev/null +++ b/services/subscriber/requirements.txt @@ -0,0 +1,2 @@ +pika==1.3.1 +psycopg2-binary==2.9.5 \ No newline at end of file diff --git a/tagbase_server/.openapi-generator/FILES b/tagbase_server/.openapi-generator/FILES index ba6c8af..965acd0 100644 --- a/tagbase_server/.openapi-generator/FILES +++ b/tagbase_server/.openapi-generator/FILES @@ -4,12 +4,17 @@ README.md git_push.sh tagbase_server/__init__.py tagbase_server/controllers/__init__.py +tagbase_server/controllers/events_controller.py tagbase_server/controllers/ingest_controller.py tagbase_server/controllers/security_controller_.py tagbase_server/controllers/tags_controller.py tagbase_server/encoder.py tagbase_server/models/__init__.py tagbase_server/models/base_model_.py +tagbase_server/models/event200.py 
+tagbase_server/models/event_put200.py +tagbase_server/models/events200.py +tagbase_server/models/events200_events_inner.py tagbase_server/models/ingest200.py tagbase_server/models/response500.py tagbase_server/models/tag200.py diff --git a/tagbase_server/.openapi-generator/VERSION b/tagbase_server/.openapi-generator/VERSION index f4965a3..e7e42a4 100644 --- a/tagbase_server/.openapi-generator/VERSION +++ b/tagbase_server/.openapi-generator/VERSION @@ -1 +1 @@ -6.0.0 \ No newline at end of file +6.3.0 \ No newline at end of file diff --git a/tagbase_server/Dockerfile b/tagbase_server/Dockerfile index ba47906..36495ab 100644 --- a/tagbase_server/Dockerfile +++ b/tagbase_server/Dockerfile @@ -1,14 +1,14 @@ FROM python:slim-bullseye RUN mkdir -p /usr/src/app -RUN mkdir -p /usr/src/app/logs WORKDIR /usr/src/app COPY requirements.txt /usr/src/app/ RUN apt update && \ apt -y upgrade && \ - apt -y install bash gcc musl-dev tzdata && \ + apt -y install bash curl gcc musl-dev tzdata && \ + python3 -m pip install --upgrade pip && \ python3 -m pip install -r requirements.txt --no-cache-dir RUN ln -fs /usr/share/zoneinfo/Etc/UTC /etc/localtime @@ -16,7 +16,8 @@ RUN ln -fs /usr/share/zoneinfo/Etc/UTC /etc/localtime COPY . /usr/src/app EXPOSE 5433 +EXPOSE 5555 ENTRYPOINT ["gunicorn"] -CMD ["tagbase_server.__main__:app"] +CMD ["tagbase_server.__main__:app", "--preload"] diff --git a/tagbase_server/README.md b/tagbase_server/README.md index a51b46d..e936b33 100644 --- a/tagbase_server/README.md +++ b/tagbase_server/README.md @@ -21,13 +21,13 @@ python3 -m tagbase_server and open your browser to here: ``` -http://localhost:8080/v0.7.0/ui/ +http://localhost:8080/tagbase/api/v0.7.0/ui/ ``` Your OpenAPI definition lives here: ``` -http://localhost:8080/v0.7.0/openapi.json +http://localhost:8080/tagbase/api/v0.7.0/openapi.json ``` To launch the integration tests, use tox: diff --git a/tagbase_server/pyproject.toml b/tagbase_server/pyproject.toml index b3ace2b..8619a9f 100644 --- a/tagbase_server/pyproject.toml +++ b/tagbase_server/pyproject.toml @@ -6,6 +6,7 @@ requires = [ "pandas>=1.4.2", "parmap>=1.5.3", "patool>=1.12", + "pika>=1.3.1", "psycopg2-binary==2.9.3", "python_dateutil>=2.6.0", "pytz>=2021.3", diff --git a/tagbase_server/requirements.txt b/tagbase_server/requirements.txt index 6cc60d1..6da3683 100644 --- a/tagbase_server/requirements.txt +++ b/tagbase_server/requirements.txt @@ -5,6 +5,7 @@ gunicorn==20.1.0 pandas>=1.4.2 parmap>=1.5.3 patool>=1.12 +pika>=1.3.1 psycopg2-binary==2.9.5 python_dateutil>=2.6.0 pytz>=2021.3 diff --git a/tagbase_server/setup.py b/tagbase_server/setup.py index abbf59e..2730363 100644 --- a/tagbase_server/setup.py +++ b/tagbase_server/setup.py @@ -20,6 +20,7 @@ "pandas>=1.4.2", "parmap>=1.5.3", "patool>=1.12", + "pika>=1.3.1", "psycopg2-binary==2.9.3", "python_dateutil>=2.6.0", "pytz>=2021.3", diff --git a/tagbase_server/tagbase_server/__main__.py b/tagbase_server/tagbase_server/__main__.py index 88d19e3..288722b 100644 --- a/tagbase_server/tagbase_server/__main__.py +++ b/tagbase_server/tagbase_server/__main__.py @@ -10,21 +10,25 @@ LOGGER_NAME = "tagbase_server" -if not os.path.exists("./logs"): - os.makedirs("./logs") +os.makedirs("./logs/{}".format(LOGGER_NAME), exist_ok=True) logger = logging.getLogger(LOGGER_NAME) if logger.hasHandlers(): logger.handlers = [] logger.setLevel(logging.INFO) -formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") +formatter = logging.Formatter( + "%(asctime)s - %(filename)s:%(lineno)d - %(levelname)s - %(message)s" 
+) s_handler = logging.StreamHandler() s_handler.setFormatter(formatter) logger.addHandler(s_handler) rf_handler = RotatingFileHandler( - f"./logs/{LOGGER_NAME}_log.txt", mode="a", maxBytes=100000, backupCount=10 + f"./logs/{LOGGER_NAME}/{LOGGER_NAME}_log.txt", + mode="a", + maxBytes=100000, + backupCount=10, ) rf_handler.setFormatter(formatter) logger.addHandler(rf_handler) diff --git a/tagbase_server/tagbase_server/controllers/events_controller.py b/tagbase_server/tagbase_server/controllers/events_controller.py new file mode 100644 index 0000000..a893eb5 --- /dev/null +++ b/tagbase_server/tagbase_server/controllers/events_controller.py @@ -0,0 +1,134 @@ +from tagbase_server.models.event200 import Event200 # noqa: E501 +from tagbase_server.models.event_put200 import EventPut200 # noqa: E501 +from tagbase_server.models.events200 import Events200 # noqa: E501 +from tagbase_server.models.response500 import Response500 # noqa: E501 +from tagbase_server.utils.db_utils import connect +from tagbase_server import util + +import logging + +logger = logging.getLogger(__name__) + + +def get_event(event_id): # noqa: E501 + """Get information about an individual event + + Get information about an individual event # noqa: E501 + + :param event_id: Event UUID + :type event_id: str + :type event_id: str + + :rtype: Union[Event200, Tuple[Event200, int], Tuple[Event200, int, Dict[str, str]] + """ + conn = connect() + with conn: + with conn.cursor() as cur: + cur.execute( + "SELECT * FROM events_log WHERE event_id = %s", + (event_id,), + ) + result = cur.fetchone() + return Event200.from_dict( + { + "submission_id": result[0], + "tag_id": result[1], + "event_id": str(result[2]), + "event_category": result[3], + "event_name": result[4], + "time_start": result[5], + "time_end": result[6], + "duration": result[7], + "event_status": result[8], + "event_notes": result[9], + } + ) + + +def list_all_events(): # noqa: E501 + """Get information about all events + + Get information about all events # noqa: E501 + + + :rtype: Union[Events200, Tuple[Events200, int], Tuple[Events200, int, Dict[str, str]] + """ + conn = connect() + with conn: + with conn.cursor() as cur: + cur.execute( + "SELECT DISTINCT event_id, tag_id, submission_id FROM events_log ORDER BY tag_id", + ) + events = [] + for event in cur.fetchall(): + events.append( + { + "event_id": str(event[0]), + "tag_id": event[1], + "submission_id": event[2], + } + ) + cur.execute( + "SELECT COUNT(DISTINCT event_id) FROM events_log", + ) + count = cur.fetchone()[0] + return Events200.from_dict({"count": count, "events": events}) + + +def list_events(tag_id, sub_id): # noqa: E501 + """Get all events for a given tag submission + + Get all events for a given tag submission # noqa: E501 + + :param tag_id: Numeric tag ID + :type tag_id: + :param sub_id: Numeric submission ID + :type sub_id: + + :rtype: Union[Events200, Tuple[Events200, int], Tuple[Events200, int, Dict[str, str]] + """ + conn = connect() + with conn: + with conn.cursor() as cur: + cur.execute( + "SELECT DISTINCT event_id" + "FROM events_log WHERE tag_id = %s AND submission_id = %s ORDER BY tag_id", + (tag_id, sub_id), + ) + events = [] + for event in cur.fetchall(): + events.append( + { + "event_id": str(event[0]), + } + ) + cur.execute( + "SELECT COUNT(DISTINCT event_id) FROM events_log", + ) + count = cur.fetchone()[0] + return Events200.from_dict({"count": count, "events": events}) + + +def put_event(event_id, notes=None): # noqa: E501 + """Update the 'notes' associated with an event + + Update notes 
for an event # noqa: E501 + + :param event_id: Event UUID + :type event_id: str + :type event_id: str + :param notes: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data) + :type notes: str + + :rtype: Union[EventPut200, Tuple[EventPut200, int], Tuple[EventPut200, int, Dict[str, str]] + """ + conn = connect() + with conn: + with conn.cursor() as cur: + if notes is not None: + cur.execute( + "UPDATE events_log SET event_notes = %s WHERE event_id = %s", + (notes, event_id), + ) + message = f"Event: '{str(event_id)}' successfully updated." + return EventPut200.from_dict({"code": "200", "message": message}) diff --git a/tagbase_server/tagbase_server/controllers/ingest_controller.py b/tagbase_server/tagbase_server/controllers/ingest_controller.py index 9ae799e..0dcece2 100644 --- a/tagbase_server/tagbase_server/controllers/ingest_controller.py +++ b/tagbase_server/tagbase_server/controllers/ingest_controller.py @@ -4,6 +4,7 @@ import parmap from tagbase_server.models.ingest200 import Ingest200 # noqa: E501 +from tagbase_server.models.response500 import Response500 # noqa: E501 from tagbase_server.utils.io_utils import ( process_get_input_data, process_post_input_data, @@ -59,23 +60,21 @@ def ingest_get(file, notes=None, type=None, version=None): # noqa: E501 ) -def ingest_post( - filename=None, notes=None, type=None, version=None, body=None -): # noqa: E501 +def ingest_post(filename, body, notes=None, type=None, version=None): # noqa: E501 """Post a local file and perform a ingest operation Post a local file and perform a ingest operation # noqa: E501 - :param notes: Free-form text field to explicitly define the name of the file to be persisted - :type notes: str + :param filename: Free-form text field to explicitly define the name of the file to be persisted + :type filename: str + :param body: + :type body: str :param notes: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. 
submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data) :type notes: str :param type: Type of file to be ingested, defaults to 'etuff' :type type: str :param version: Version identifier for the eTUFF tag data file ingested :type version: str - :param body: Payload body - :type body: str :rtype: Union[Ingest200, Tuple[Ingest200, int], Tuple[Ingest200, int, Dict[str, str]] """ diff --git a/tagbase_server/tagbase_server/controllers/tags_controller.py b/tagbase_server/tagbase_server/controllers/tags_controller.py index 02dc4b4..115b94e 100644 --- a/tagbase_server/tagbase_server/controllers/tags_controller.py +++ b/tagbase_server/tagbase_server/controllers/tags_controller.py @@ -1,11 +1,9 @@ -from tagbase_server.utils.db_utils import connect - +from tagbase_server.models.response500 import Response500 # noqa: E501 from tagbase_server.models.tag200 import Tag200 # noqa: E501 from tagbase_server.models.tag_put200 import TagPut200 # noqa: E501 - -import logging - -logger = logging.getLogger(__name__) +from tagbase_server.models.tags200 import Tags200 # noqa: E501 +from tagbase_server.utils.db_utils import connect +from tagbase_server import util def get_tag(tag_id): # noqa: E501 @@ -13,7 +11,7 @@ def get_tag(tag_id): # noqa: E501 Get information about an individual tag # noqa: E501 - :param tag_id: Existing tag id + :param tag_id: Numeric tag ID :type tag_id: :rtype: Union[Tag200, Tuple[Tag200, int], Tuple[Tag200, int, Dict[str, str]] @@ -78,9 +76,9 @@ def put_tag(tag_id, sub_id, notes=None, version=None): # noqa: E501 Update a tag submission # noqa: E501 - :param tag_id: Existing tag id + :param tag_id: Numeric tag ID :type tag_id: - :param sub_id: Existing submission id for an existing tag + :param sub_id: Numeric submission ID :type sub_id: :param notes: Free-form text field where details of submitted eTUFF file for ingest can be provided e.g. submitter name, etuff data contents (tag metadata and measurements + primary position data, or just secondary solution-positional meta/data) :type notes: str diff --git a/tagbase_server/tagbase_server/models/__init__.py b/tagbase_server/tagbase_server/models/__init__.py index 8880414..5e9ac12 100644 --- a/tagbase_server/tagbase_server/models/__init__.py +++ b/tagbase_server/tagbase_server/models/__init__.py @@ -4,6 +4,10 @@ from __future__ import absolute_import # import models into model package +from tagbase_server.models.event200 import Event200 +from tagbase_server.models.event_put200 import EventPut200 +from tagbase_server.models.events200 import Events200 +from tagbase_server.models.events200_events_inner import Events200EventsInner from tagbase_server.models.ingest200 import Ingest200 from tagbase_server.models.response500 import Response500 from tagbase_server.models.tag200 import Tag200 diff --git a/tagbase_server/tagbase_server/models/base_model_.py b/tagbase_server/tagbase_server/models/base_model_.py index 4ef04ae..01f878e 100644 --- a/tagbase_server/tagbase_server/models/base_model_.py +++ b/tagbase_server/tagbase_server/models/base_model_.py @@ -8,7 +8,7 @@ T = typing.TypeVar("T") -class Model: +class Model(object): # openapiTypes: The key is attribute name and the # value is attribute type. 
openapi_types: typing.Dict[str, type] = {} diff --git a/tagbase_server/tagbase_server/models/event200.py b/tagbase_server/tagbase_server/models/event200.py new file mode 100644 index 0000000..ddf591a --- /dev/null +++ b/tagbase_server/tagbase_server/models/event200.py @@ -0,0 +1,337 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from tagbase_server.models.base_model_ import Model +from tagbase_server import util + + +class Event200(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__( + self, + event_category=None, + event_id=None, + event_name=None, + event_notes=None, + event_status=None, + time_start=None, + time_end=None, + duration=None, + submission_id=None, + tag_id=None, + ): # noqa: E501 + """Event200 - a model defined in OpenAPI + + :param event_category: The event_category of this Event200. # noqa: E501 + :type event_category: str + :param event_id: The event_id of this Event200. # noqa: E501 + :type event_id: str + :param event_name: The event_name of this Event200. # noqa: E501 + :type event_name: str + :param event_notes: The event_notes of this Event200. # noqa: E501 + :type event_notes: str + :param event_status: The event_status of this Event200. # noqa: E501 + :type event_status: str + :param time_start: The time_start of this Event200. # noqa: E501 + :type time_start: str + :param time_end: The time_end of this Event200. # noqa: E501 + :type time_end: str + :param duration: The duration of this Event200. # noqa: E501 + :type duration: str + :param submission_id: The submission_id of this Event200. # noqa: E501 + :type submission_id: int + :param tag_id: The tag_id of this Event200. # noqa: E501 + :type tag_id: int + """ + self.openapi_types = { + "event_category": str, + "event_id": str, + "event_name": str, + "event_notes": str, + "event_status": str, + "time_start": str, + "time_end": str, + "duration": str, + "submission_id": int, + "tag_id": int, + } + + self.attribute_map = { + "event_category": "event_category", + "event_id": "event_id", + "event_name": "event_name", + "event_notes": "event_notes", + "event_status": "event_status", + "time_start": "time_start", + "time_end": "time_end", + "duration": "duration", + "submission_id": "submission_id", + "tag_id": "tag_id", + } + + self._event_category = event_category + self._event_id = event_id + self._event_name = event_name + self._event_notes = event_notes + self._event_status = event_status + self._time_start = time_start + self._time_end = time_end + self._duration = duration + self._submission_id = submission_id + self._tag_id = tag_id + + @classmethod + def from_dict(cls, dikt) -> "Event200": + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The event200 of this Event200. # noqa: E501 + :rtype: Event200 + """ + return util.deserialize_model(dikt, cls) + + @property + def event_category(self): + """Gets the event_category of this Event200. + + ... # noqa: E501 + + :return: The event_category of this Event200. + :rtype: str + """ + return self._event_category + + @event_category.setter + def event_category(self, event_category): + """Sets the event_category of this Event200. + + ... # noqa: E501 + + :param event_category: The event_category of this Event200. 
+ :type event_category: str + """ + + self._event_category = event_category + + @property + def event_id(self): + """Gets the event_id of this Event200. + + UUID associated with a particular event # noqa: E501 + + :return: The event_id of this Event200. + :rtype: str + """ + return self._event_id + + @event_id.setter + def event_id(self, event_id): + """Sets the event_id of this Event200. + + UUID associated with a particular event # noqa: E501 + + :param event_id: The event_id of this Event200. + :type event_id: str + """ + + self._event_id = event_id + + @property + def event_name(self): + """Gets the event_name of this Event200. + + ... # noqa: E501 + + :return: The event_name of this Event200. + :rtype: str + """ + return self._event_name + + @event_name.setter + def event_name(self, event_name): + """Sets the event_name of this Event200. + + ... # noqa: E501 + + :param event_name: The event_name of this Event200. + :type event_name: str + """ + + self._event_name = event_name + + @property + def event_notes(self): + """Gets the event_notes of this Event200. + + Free-form text field where details of the event can be optionally entered by the client # noqa: E501 + + :return: The event_notes of this Event200. + :rtype: str + """ + return self._event_notes + + @event_notes.setter + def event_notes(self, event_notes): + """Sets the event_notes of this Event200. + + Free-form text field where details of the event can be optionally entered by the client # noqa: E501 + + :param event_notes: The event_notes of this Event200. + :type event_notes: str + """ + + self._event_notes = event_notes + + @property + def event_status(self): + """Gets the event_status of this Event200. + + Free-form text field where details of the event can be optionally entered by the client # noqa: E501 + + :return: The event_status of this Event200. + :rtype: str + """ + return self._event_status + + @event_status.setter + def event_status(self, event_status): + """Sets the event_status of this Event200. + + Free-form text field where details of the event can be optionally entered by the client # noqa: E501 + + :param event_status: The event_status of this Event200. + :type event_status: str + """ + allowed_values = ["failed", "finished", "killed", "running"] # noqa: E501 + if event_status not in allowed_values: + raise ValueError( + "Invalid value for `event_status` ({0}), must be one of {1}".format( + event_status, allowed_values + ) + ) + + self._event_status = event_status + + @property + def time_start(self): + """Gets the time_start of this Event200. + + Local datetime stamp at the time of the event start # noqa: E501 + + :return: The time_start of this Event200. + :rtype: str + """ + return self._time_start + + @time_start.setter + def time_start(self, time_start): + """Sets the time_start of this Event200. + + Local datetime stamp at the time of the event start # noqa: E501 + + :param time_start: The time_start of this Event200. + :type time_start: str + """ + + self._time_start = time_start + + @property + def time_end(self): + """Gets the time_end of this Event200. + + Local datetime stamp at the time of the event end # noqa: E501 + + :return: The time_end of this Event200. + :rtype: str + """ + return self._time_end + + @time_end.setter + def time_end(self, time_end): + """Sets the time_end of this Event200. + + Local datetime stamp at the time of the event end # noqa: E501 + + :param time_end: The time_end of this Event200. 
+ :type time_end: str + """ + + self._time_end = time_end + + @property + def duration(self): + """Gets the duration of this Event200. + + The event duration e.g. different between 'time_start' and 'time_end' # noqa: E501 + + :return: The duration of this Event200. + :rtype: str + """ + return self._duration + + @duration.setter + def duration(self, duration): + """Sets the duration of this Event200. + + The event duration e.g. different between 'time_start' and 'time_end' # noqa: E501 + + :param duration: The duration of this Event200. + :type duration: str + """ + + self._duration = duration + + @property + def submission_id(self): + """Gets the submission_id of this Event200. + + Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase # noqa: E501 + + :return: The submission_id of this Event200. + :rtype: int + """ + return self._submission_id + + @submission_id.setter + def submission_id(self, submission_id): + """Sets the submission_id of this Event200. + + Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase # noqa: E501 + + :param submission_id: The submission_id of this Event200. + :type submission_id: int + """ + + self._submission_id = submission_id + + @property + def tag_id(self): + """Gets the tag_id of this Event200. + + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 + + :return: The tag_id of this Event200. + :rtype: int + """ + return self._tag_id + + @tag_id.setter + def tag_id(self, tag_id): + """Sets the tag_id of this Event200. + + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 + + :param tag_id: The tag_id of this Event200. + :type tag_id: int + """ + + self._tag_id = tag_id diff --git a/tagbase_server/tagbase_server/models/event_put200.py b/tagbase_server/tagbase_server/models/event_put200.py new file mode 100644 index 0000000..7e0d9bb --- /dev/null +++ b/tagbase_server/tagbase_server/models/event_put200.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from tagbase_server.models.base_model_ import Model +from tagbase_server import util + + +class EventPut200(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, code=None, message=None): # noqa: E501 + """EventPut200 - a model defined in OpenAPI + + :param code: The code of this EventPut200. # noqa: E501 + :type code: str + :param message: The message of this EventPut200. # noqa: E501 + :type message: str + """ + self.openapi_types = {"code": str, "message": str} + + self.attribute_map = {"code": "code", "message": "message"} + + self._code = code + self._message = message + + @classmethod + def from_dict(cls, dikt) -> "EventPut200": + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The eventPut200 of this EventPut200. # noqa: E501 + :rtype: EventPut200 + """ + return util.deserialize_model(dikt, cls) + + @property + def code(self): + """Gets the code of this EventPut200. + + HTTP status code # noqa: E501 + + :return: The code of this EventPut200. + :rtype: str + """ + return self._code + + @code.setter + def code(self, code): + """Sets the code of this EventPut200. + + HTTP status code # noqa: E501 + + :param code: The code of this EventPut200. 
+ :type code: str + """ + + self._code = code + + @property + def message(self): + """Gets the message of this EventPut200. + + A string detailing specifics of the HTTP operation # noqa: E501 + + :return: The message of this EventPut200. + :rtype: str + """ + return self._message + + @message.setter + def message(self, message): + """Sets the message of this EventPut200. + + A string detailing specifics of the HTTP operation # noqa: E501 + + :param message: The message of this EventPut200. + :type message: str + """ + + self._message = message diff --git a/tagbase_server/tagbase_server/models/events200.py b/tagbase_server/tagbase_server/models/events200.py new file mode 100644 index 0000000..7563619 --- /dev/null +++ b/tagbase_server/tagbase_server/models/events200.py @@ -0,0 +1,93 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from tagbase_server.models.base_model_ import Model +from tagbase_server.models.events200_events_inner import Events200EventsInner +from tagbase_server import util + +from tagbase_server.models.events200_events_inner import ( + Events200EventsInner, +) # noqa: E501 + + +class Events200(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, count=None, events=None): # noqa: E501 + """Events200 - a model defined in OpenAPI + + :param count: The count of this Events200. # noqa: E501 + :type count: int + :param events: The events of this Events200. # noqa: E501 + :type events: List[Events200EventsInner] + """ + self.openapi_types = {"count": int, "events": List[Events200EventsInner]} + + self.attribute_map = {"count": "count", "events": "events"} + + self._count = count + self._events = events + + @classmethod + def from_dict(cls, dikt) -> "Events200": + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The events200 of this Events200. # noqa: E501 + :rtype: Events200 + """ + return util.deserialize_model(dikt, cls) + + @property + def count(self): + """Gets the count of this Events200. + + Total count of unique events # noqa: E501 + + :return: The count of this Events200. + :rtype: int + """ + return self._count + + @count.setter + def count(self, count): + """Sets the count of this Events200. + + Total count of unique events # noqa: E501 + + :param count: The count of this Events200. + :type count: int + """ + + self._count = count + + @property + def events(self): + """Gets the events of this Events200. + + List of event UUID's and associated tag and submission IDs # noqa: E501 + + :return: The events of this Events200. + :rtype: List[Events200EventsInner] + """ + return self._events + + @events.setter + def events(self, events): + """Sets the events of this Events200. + + List of event UUID's and associated tag and submission IDs # noqa: E501 + + :param events: The events of this Events200. 
+ :type events: List[Events200EventsInner] + """ + + self._events = events diff --git a/tagbase_server/tagbase_server/models/events200_events_inner.py b/tagbase_server/tagbase_server/models/events200_events_inner.py new file mode 100644 index 0000000..636d92d --- /dev/null +++ b/tagbase_server/tagbase_server/models/events200_events_inner.py @@ -0,0 +1,118 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from tagbase_server.models.base_model_ import Model +from tagbase_server import util + + +class Events200EventsInner(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, event_id=None, tag_id=None, submission_id=None): # noqa: E501 + """Events200EventsInner - a model defined in OpenAPI + + :param event_id: The event_id of this Events200EventsInner. # noqa: E501 + :type event_id: str + :param tag_id: The tag_id of this Events200EventsInner. # noqa: E501 + :type tag_id: int + :param submission_id: The submission_id of this Events200EventsInner. # noqa: E501 + :type submission_id: int + """ + self.openapi_types = {"event_id": str, "tag_id": int, "submission_id": int} + + self.attribute_map = { + "event_id": "event_id", + "tag_id": "tag_id", + "submission_id": "submission_id", + } + + self._event_id = event_id + self._tag_id = tag_id + self._submission_id = submission_id + + @classmethod + def from_dict(cls, dikt) -> "Events200EventsInner": + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The events200_events_inner of this Events200EventsInner. # noqa: E501 + :rtype: Events200EventsInner + """ + return util.deserialize_model(dikt, cls) + + @property + def event_id(self): + """Gets the event_id of this Events200EventsInner. + + UUID associated with a particular event # noqa: E501 + + :return: The event_id of this Events200EventsInner. + :rtype: str + """ + return self._event_id + + @event_id.setter + def event_id(self, event_id): + """Sets the event_id of this Events200EventsInner. + + UUID associated with a particular event # noqa: E501 + + :param event_id: The event_id of this Events200EventsInner. + :type event_id: str + """ + + self._event_id = event_id + + @property + def tag_id(self): + """Gets the tag_id of this Events200EventsInner. + + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 + + :return: The tag_id of this Events200EventsInner. + :rtype: int + """ + return self._tag_id + + @tag_id.setter + def tag_id(self, tag_id): + """Sets the tag_id of this Events200EventsInner. + + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 + + :param tag_id: The tag_id of this Events200EventsInner. + :type tag_id: int + """ + + self._tag_id = tag_id + + @property + def submission_id(self): + """Gets the submission_id of this Events200EventsInner. + + Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase # noqa: E501 + + :return: The submission_id of this Events200EventsInner. + :rtype: int + """ + return self._submission_id + + @submission_id.setter + def submission_id(self, submission_id): + """Sets the submission_id of this Events200EventsInner. + + Unique numeric ID assigned upon submission of a tag eTUFF data file for ingest/importation into Tagbase # noqa: E501 + + :param submission_id: The submission_id of this Events200EventsInner. 
+ :type submission_id: int + """ + + self._submission_id = submission_id diff --git a/tagbase_server/tagbase_server/models/ingest200.py b/tagbase_server/tagbase_server/models/ingest200.py index 3d220a7..b5cf3c7 100644 --- a/tagbase_server/tagbase_server/models/ingest200.py +++ b/tagbase_server/tagbase_server/models/ingest200.py @@ -43,7 +43,7 @@ def from_dict(cls, dikt) -> "Ingest200": :param dikt: A dict. :type: dict - :return: The Ingest200 of this Ingest200. # noqa: E501 + :return: The ingest200 of this Ingest200. # noqa: E501 :rtype: Ingest200 """ return util.deserialize_model(dikt, cls) diff --git a/tagbase_server/tagbase_server/models/response500.py b/tagbase_server/tagbase_server/models/response500.py index c2ff82c..a4ba4c5 100644 --- a/tagbase_server/tagbase_server/models/response500.py +++ b/tagbase_server/tagbase_server/models/response500.py @@ -54,7 +54,7 @@ def from_dict(cls, dikt) -> "Response500": :param dikt: A dict. :type: dict - :return: The Response500 of this Response500. # noqa: E501 + :return: The response500 of this Response500. # noqa: E501 :rtype: Response500 """ return util.deserialize_model(dikt, cls) @@ -109,7 +109,7 @@ def message(self, message): def more_info(self): """Gets the more_info of this Response500. - Additional details (if available) to diagnose the 500 response. # noqa: E501 + Additional details (if available) to diagnose the 500 response # noqa: E501 :return: The more_info of this Response500. :rtype: str @@ -120,7 +120,7 @@ def more_info(self): def more_info(self, more_info): """Sets the more_info of this Response500. - Additional details (if available) to diagnose the 500 response. # noqa: E501 + Additional details (if available) to diagnose the 500 response # noqa: E501 :param more_info: The more_info of this Response500. :type more_info: str diff --git a/tagbase_server/tagbase_server/models/tag200.py b/tagbase_server/tagbase_server/models/tag200.py index 70b67b0..7698fe7 100644 --- a/tagbase_server/tagbase_server/models/tag200.py +++ b/tagbase_server/tagbase_server/models/tag200.py @@ -36,7 +36,7 @@ def from_dict(cls, dikt) -> "Tag200": :param dikt: A dict. :type: dict - :return: The Tag200 of this Tag200. # noqa: E501 + :return: The tag200 of this Tag200. # noqa: E501 :rtype: Tag200 """ return util.deserialize_model(dikt, cls) @@ -45,7 +45,7 @@ def from_dict(cls, dikt) -> "Tag200": def tag(self): """Gets the tag of this Tag200. - List containing one or more submissions for a given tag # noqa: E501 + List containing submissions for a given tag # noqa: E501 :return: The tag of this Tag200. :rtype: List[Tag200TagInner] @@ -56,7 +56,7 @@ def tag(self): def tag(self, tag): """Sets the tag of this Tag200. - List containing one or more submissions for a given tag # noqa: E501 + List containing submissions for a given tag # noqa: E501 :param tag: The tag of this Tag200. :type tag: List[Tag200TagInner] diff --git a/tagbase_server/tagbase_server/models/tag200_tag_inner.py b/tagbase_server/tagbase_server/models/tag200_tag_inner.py index a46eae9..3f53775 100644 --- a/tagbase_server/tagbase_server/models/tag200_tag_inner.py +++ b/tagbase_server/tagbase_server/models/tag200_tag_inner.py @@ -76,7 +76,7 @@ def from_dict(cls, dikt) -> "Tag200TagInner": :param dikt: A dict. :type: dict - :return: The Tag200_tag_inner of this Tag200TagInner. # noqa: E501 + :return: The tag200_tag_inner of this Tag200TagInner. 
# noqa: E501 :rtype: Tag200TagInner """ return util.deserialize_model(dikt, cls) @@ -200,7 +200,7 @@ def submission_id(self, submission_id): def tag_id(self): """Gets the tag_id of this Tag200TagInner. - Unique numeric Tag ID associated with the ingested tag eTUFF data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :return: The tag_id of this Tag200TagInner. :rtype: int @@ -211,7 +211,7 @@ def tag_id(self): def tag_id(self, tag_id): """Sets the tag_id of this Tag200TagInner. - Unique numeric Tag ID associated with the ingested tag eTUFF data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :param tag_id: The tag_id of this Tag200TagInner. :type tag_id: int diff --git a/tagbase_server/tagbase_server/models/tag_put200.py b/tagbase_server/tagbase_server/models/tag_put200.py index 1f604cd..96fcf9f 100644 --- a/tagbase_server/tagbase_server/models/tag_put200.py +++ b/tagbase_server/tagbase_server/models/tag_put200.py @@ -36,7 +36,7 @@ def from_dict(cls, dikt) -> "TagPut200": :param dikt: A dict. :type: dict - :return: The TagPut200 of this TagPut200. # noqa: E501 + :return: The tagPut200 of this TagPut200. # noqa: E501 :rtype: TagPut200 """ return util.deserialize_model(dikt, cls) diff --git a/tagbase_server/tagbase_server/models/tags200.py b/tagbase_server/tagbase_server/models/tags200.py index 65ab227..721dda2 100644 --- a/tagbase_server/tagbase_server/models/tags200.py +++ b/tagbase_server/tagbase_server/models/tags200.py @@ -39,7 +39,7 @@ def from_dict(cls, dikt) -> "Tags200": :param dikt: A dict. :type: dict - :return: The Tags200 of this Tags200. # noqa: E501 + :return: The tags200 of this Tags200. # noqa: E501 :rtype: Tags200 """ return util.deserialize_model(dikt, cls) diff --git a/tagbase_server/tagbase_server/models/tags200_tags_inner.py b/tagbase_server/tagbase_server/models/tags200_tags_inner.py index f6819b3..2076970 100644 --- a/tagbase_server/tagbase_server/models/tags200_tags_inner.py +++ b/tagbase_server/tagbase_server/models/tags200_tags_inner.py @@ -36,7 +36,7 @@ def from_dict(cls, dikt) -> "Tags200TagsInner": :param dikt: A dict. :type: dict - :return: The Tags200_tags_inner of this Tags200TagsInner. # noqa: E501 + :return: The tags200_tags_inner of this Tags200TagsInner. # noqa: E501 :rtype: Tags200TagsInner """ return util.deserialize_model(dikt, cls) @@ -45,7 +45,7 @@ def from_dict(cls, dikt) -> "Tags200TagsInner": def tag_id(self): """Gets the tag_id of this Tags200TagsInner. - Unique numeric Tag ID associated with the ingested tag data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :return: The tag_id of this Tags200TagsInner. :rtype: int @@ -56,7 +56,7 @@ def tag_id(self): def tag_id(self, tag_id): """Sets the tag_id of this Tags200TagsInner. - Unique numeric Tag ID associated with the ingested tag data file # noqa: E501 + Unique numeric tag ID associated with the ingested tag data file # noqa: E501 :param tag_id: The tag_id of this Tags200TagsInner. 
:type tag_id: int diff --git a/tagbase_server/tagbase_server/openapi/openapi.yaml b/tagbase_server/tagbase_server/openapi/openapi.yaml index 2edd778..3b1d8cc 100644 --- a/tagbase_server/tagbase_server/openapi/openapi.yaml +++ b/tagbase_server/tagbase_server/openapi/openapi.yaml @@ -4,10 +4,10 @@ info: email: tagtuna@gmail.com name: Tagbase Dev Team url: https://github.com/tagbase/tagbase-server/issues - description: "tagbse-server provides HTTP endpoints for ingestion of various files\ - \ \\\ninto a Tagbase SQL database. Input file support currently includes eTUFF\ - \ (see [here](https://doi.org/10.6084/m9.figshare.10032848.v4) \\\nand [here](https://doi.org/10.6084/m9.figshare.10159820.v1)).\ - \ The REST API complies with [OpenAPI v3.0.3](https://spec.openapis.org/oas/v3.0.3.html).\n" + description: | + tagbse-server provides HTTP endpoints for ingestion of various files \ + into a Tagbase SQL database. Input file support currently includes eTUFF (see [here](https://doi.org/10.6084/m9.figshare.10032848.v4) \ + and [here](https://doi.org/10.6084/m9.figshare.10159820.v1)). license: name: Apache License v2.0 url: https://www.apache.org/licenses/LICENSE-2.0 @@ -20,11 +20,107 @@ servers: - description: ICCAT Test tagbase-server url: https://162.13.162.49/tagbase/api/v0.7.0 tags: -- description: Ingestion operations. +- description: Event Operations + name: events +- description: Ingestion operations name: ingest -- description: Tag Operations. +- description: Tag Operations name: tags paths: + /events: + get: + description: Get information about all events + operationId: list_all_events + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: A list containing all events. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Get information about all events + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + /events/{event_id}: + get: + description: Get information about an individual event + operationId: get_event + parameters: + - description: Event UUID + explode: true + in: path + name: event_id + required: true + schema: + $ref: '#/components/schemas/event_id' + style: simple + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/event200' + description: Information about an individual tag + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Get information about an individual event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller + put: + description: Update notes for an event + operationId: put_event + parameters: + - description: Event UUID + explode: true + in: path + name: event_id + required: true + schema: + $ref: '#/components/schemas/event_id' + style: simple + - description: "Free-form text field where details of submitted eTUFF file for\ + \ ingest can be provided e.g. 
submitter name, etuff data contents (tag metadata\ + \ and measurements + primary position data, or just secondary solution-positional\ + \ meta/data)" + explode: true + in: query + name: notes + required: false + schema: + maxLength: 10000 + minLength: 1 + pattern: ^(?!\s*$).+ + type: string + style: form + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/eventPut200' + description: A success message confirming ingestion. + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + summary: Update the 'notes' associated with an event + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller /ingest: get: description: Get network accessible file and execute ingestion @@ -84,13 +180,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get network accessible file and execute ingestion tags: @@ -100,7 +196,8 @@ paths: description: Post a local file and perform a ingest operation operationId: ingest_post parameters: - - description: "Free-form text field to explicitly define the name of the file to be persisted" + - description: Free-form text field to explicitly define the name of the file + to be persisted explode: true in: query name: filename @@ -152,7 +249,7 @@ paths: content: application/octet-stream: schema: - description: compressed binary file containing one or more eTUFF files + description: Compressed binary file containing one or more eTUFF files format: binary maxLength: 1000000000 minLength: 1 @@ -171,13 +268,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Ingest200' + $ref: '#/components/schemas/ingest200' description: A success message confirming ingestion. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Post a local file and perform a ingest operation tags: @@ -193,13 +290,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tags200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tags200' + description: A list of all tags. "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about all tags tags: @@ -210,7 +307,7 @@ paths: description: Get information about an individual tag operationId: get_tag parameters: - - description: Existing tag id + - description: Numeric tag ID explode: true in: path name: tag_id @@ -223,13 +320,13 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Tag200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tag200' + description: Information about an individual tag. 
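A short client-side sketch of the new events endpoints declared above, using only the standard library; the base URL matches the local address given in the README, and the event UUID is a placeholder for an existing event:

    import json
    import urllib.parse
    import urllib.request

    BASE = "http://localhost:8080/tagbase/api/v0.7.0"   # local address from the README; adjust per deployment
    event_id = "06335e84-2872-4914-8c5d-3ed07d2a2f16"   # placeholder UUID of an existing event

    # GET /events - list every recorded event.
    with urllib.request.urlopen(f"{BASE}/events") as resp:
        listing = json.load(resp)
    print(listing["count"], "events")

    # GET /events/{event_id} - inspect a single event.
    with urllib.request.urlopen(f"{BASE}/events/{event_id}") as resp:
        print(json.load(resp)["event_status"])

    # PUT /events/{event_id}?notes=... - the notes travel as a query parameter.
    notes = urllib.parse.quote("Re-ingested after correcting the source eTUFF file")
    req = urllib.request.Request(f"{BASE}/events/{event_id}?notes={notes}", method="PUT")
    with urllib.request.urlopen(req) as resp:
        print(json.load(resp)["message"])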
"500": content: application/json: schema: - $ref: '#/components/schemas/Response500' + $ref: '#/components/schemas/response500' description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. summary: Get information about an individual tag tags: @@ -254,7 +351,7 @@ paths: pattern: ^(?!\s*$).+ type: string style: form - - description: Existing tag id + - description: Numeric tag ID explode: true in: path name: tag_id @@ -262,7 +359,7 @@ paths: schema: type: number style: simple - - description: Existing submission id for an existing tag + - description: Numeric submission ID explode: true in: path name: sub_id @@ -286,22 +383,70 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/TagPut200' - description: A success message confirming ingestion. + $ref: '#/components/schemas/tagPut200' + description: Message confirming successful data update "500": content: application/json: schema: - $ref: '#/components/schemas/Response500' - description: Internal tagbase-server error. Contact admin detailed in openapi.yaml. + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml summary: Update the 'notes' and/or 'version' associated with a tag submission tags: - tags x-openapi-router-controller: tagbase_server.controllers.tags_controller + /tags/{tag_id}/subs/{sub_id}/events: + get: + description: Get all events for a given tag submission + operationId: list_events + parameters: + - description: Numeric tag ID + explode: true + in: path + name: tag_id + required: true + schema: + type: number + style: simple + - description: Numeric submission ID + explode: true + in: path + name: sub_id + required: true + schema: + type: number + style: simple + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/events200' + description: Message confirming successful data update + "500": + content: + application/json: + schema: + $ref: '#/components/schemas/response500' + description: Internal tagbase-server error. Contact admin detailed in openapi.yaml + summary: Get all events for a given tag submission + tags: + - events + x-openapi-router-controller: tagbase_server.controllers.events_controller components: parameters: + event_id: + description: Event UUID + explode: true + in: path + name: event_id + required: true + schema: + $ref: '#/components/schemas/event_id' + style: simple filename: - description: Free-form text field to explicitly define the name of the file to be persisted + description: Free-form text field to explicitly define the name of the file + to be persisted explode: true in: query name: filename @@ -328,7 +473,7 @@ components: type: string style: form subId: - description: Existing submission id for an existing tag + description: Numeric submission ID explode: true in: path name: sub_id @@ -337,7 +482,7 @@ components: type: number style: simple tagId: - description: Existing tag id + description: Numeric tag ID explode: true in: path name: tag_id @@ -376,13 +521,132 @@ components: example: eTUFF-sailfish-117259_2.txt title: filename type: string - Ingest200: + event200: + description: Information for a given event + example: + event_id: 06335e84-2872-4914-8c5d-3ed07d2a2dj4 + tag_id: 1 + submission_id: 1 + event_category: submission + event_name: new tag submission + time_start: 2022-04-01T04:58:21.319061+00:00 + time_end: 2022-04-01T04:59:21.319061+00:00 + duration: 0:01:00 + event_status: finished + event_notes: Some user defined notes... 
+ properties: + event_category: + description: '...' + example: '...' + type: string + event_id: + description: UUID associated with a particular event + example: 06335e84-2872-4914-8c5d-3ed07d2a2f16 + format: uuid + title: event_id + type: string + event_name: + description: '...' + example: '...' + type: string + event_notes: + description: Free-form text field where details of the event can be optionally + entered by the client + example: "The event represents a data anomaly with file XYZ because of ...,\ + \ this has been fixed in version ABC." + type: string + event_status: + description: Free-form text field where details of the event can be optionally + entered by the client + enum: + - failed + - finished + - killed + - running + example: failed + type: string + time_start: + description: Local datetime stamp at the time of the event start + example: 2022-04-01T04:58:21.319061+00:00 + type: string + time_end: + description: Local datetime stamp at the time of the event end + example: 2022-04-01T04:58:21.319061+00:00 + type: string + duration: + description: The event duration e.g. different between 'time_start' and + 'time_end' + example: 0:01:00 + type: string + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + tag_id: + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer + title: event200 + type: object + eventPut200: + description: Event update response + example: + code: "200" + message: Notes for event '1' successfully updated. + properties: + code: + description: HTTP status code + example: "200" + type: string + message: + description: A string detailing specifics of the HTTP operation + example: Notes for event '1' successfully updated. + type: string + title: eventPut200 + type: object + event_id: + description: UUID associated with a particular event + example: 06335e84-2872-4914-8c5d-3ed07d2a2f16 + format: uuid + title: event_id + type: string + events200: + description: Response detailing all available events + example: + count: 3 + events: + - event_id: 06335e84-2872-4914-8c5d-3ed07d2a2f16 + tag_id: 1 + submission_id: 1 + - event_id: 06335e84-2872-4914-8c5d-3ed07d2a2dj4 + tag_id: 1 + submission_id: 1 + - event_id: 06335e84-2872-4914-8c5d-3ed07d2a2fkf + tag_id: 1 + submission_id: 1 + properties: + count: + description: Total count of unique events + example: 3 + type: integer + events: + description: List of event UUID's and associated tag and submission IDs + items: + $ref: '#/components/schemas/events200_events_inner' + type: array + title: tags200 + type: object + ingest200: description: HTTP 200 success response example: code: "200" elapsed: 0:00:06.506691 message: Data file eTUFF-sailfish-117259.txt successfully ingested into Tagbase - DB. + DB properties: code: description: HTTP status code @@ -394,11 +658,11 @@ components: message: description: A string detailing specifics of an HTTP operation example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. + Tagbase DB type: string - title: Ingest200 + title: ingest200 type: object - Response500: + response500: description: 500 Internal Server Error example: code: "200" @@ -413,19 +677,25 @@ components: message: description: A string detailing specifics of the HTTP 500 response example: Data file eTUFF-sailfish-117259.txt successfully ingested into - Tagbase DB. 
+ Tagbase DB type: string more_info: - description: Additional details (if available) to diagnose the 500 response. + description: Additional details (if available) to diagnose the 500 response example: https://httpwg.org/specs/rfc7231.html#status.500 type: string trace: description: Trace diagnostic information related to the response example: 123e4567-e89b-12d3-a456-426614174000 type: string - title: Response500 + title: response500 type: object - Tag200: + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + tag200: description: Information for a given tag example: tag: @@ -456,13 +726,13 @@ components: version: "2" properties: tag: - description: List containing one or more submissions for a given tag + description: List containing submissions for a given tag items: - $ref: '#/components/schemas/Tag200_tag_inner' + $ref: '#/components/schemas/tag200_tag_inner' type: array - title: Tag200 + title: tag200 type: object - TagPut200: + tagPut200: description: HTTP 200 success response example: code: "200" @@ -476,9 +746,14 @@ components: description: A string detailing specifics of an HTTP operation example: Tag '1' submission '5' successfully updated. type: string - title: TagPut200 + title: tagPut200 type: object - Tags200: + tag_id: + description: Unique numeric tag ID associated with the ingested tag data file + example: 1 + title: tag_id + type: integer + tags200: description: Response detailing all available unique tags and associated filename example: count: 2 @@ -495,11 +770,38 @@ components: tags: description: List of unique numeric Tag IDs and associated filename items: - $ref: '#/components/schemas/Tags200_tags_inner' + $ref: '#/components/schemas/tags200_tags_inner' type: array - title: Tags200 + title: tags200 type: object - Tag200_tag_inner: + events200_events_inner: + description: UUID and associated tag and submission ID + example: + event_id: 06335e84-2872-4914-8c5d-3ed07d2a2dj4 + tag_id: 1 + submission_id: 1 + properties: + event_id: + description: UUID associated with a particular event + example: 06335e84-2872-4914-8c5d-3ed07d2a2f16 + format: uuid + title: event_id + type: string + tag_id: + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 + title: tag_id + type: integer + submission_id: + description: Unique numeric ID assigned upon submission of a tag eTUFF data + file for ingest/importation into Tagbase + example: 5 + title: submission_id + type: integer + title: events200_events_inner + type: object + tag200_tag_inner: properties: date_time: description: Local datetime stamp at the time of eTUFF tag data file ingestion @@ -539,9 +841,9 @@ components: title: submission_id type: integer tag_id: - description: Unique numeric Tag ID associated with the ingested tag eTUFF - data file - example: 3 + description: Unique numeric tag ID associated with the ingested tag data + file + example: 1 title: tag_id type: integer version: @@ -549,9 +851,9 @@ components: example: "1" title: version type: string - title: Tag200_tag_inner + title: tag200_tag_inner type: object - Tags200_tags_inner: + tags200_tags_inner: description: Unique numeric Tag ID associated with the ingested tag eTUFF data file example: @@ -559,13 +861,15 @@ components: filename: eTUFF-sailfish-117259_2.txt properties: tag_id: - description: Unique numeric Tag ID associated with the ingested tag data + description: Unique numeric tag 
diff --git a/tagbase_server/tagbase_server/test/test_events_controller.py b/tagbase_server/tagbase_server/test/test_events_controller.py
new file mode 100644
index 0000000..705ae50
--- /dev/null
+++ b/tagbase_server/tagbase_server/test/test_events_controller.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+import unittest
+
+from flask import json
+from six import BytesIO
+
+from tagbase_server.models.event200 import Event200  # noqa: E501
+from tagbase_server.models.event_put200 import EventPut200  # noqa: E501
+from tagbase_server.models.events200 import Events200  # noqa: E501
+from tagbase_server.models.response500 import Response500  # noqa: E501
+from tagbase_server.test import BaseTestCase
+
+
+class TestEventsController(BaseTestCase):
+    """EventsController integration test stubs"""
+
+    def test_get_event(self):
+        """Test case for get_event
+
+        Get information about an individual event
+        """
+        headers = {
+            "Accept": "application/json",
+        }
+        response = self.client.open(
+            "/tagbase/api/v0.7.0/events/{event_id}".format(event_id=3.4),
+            method="GET",
+            headers=headers,
+        )
+        self.assert500(response, "Response body is : " + response.data.decode("utf-8"))
+
+    def test_list_all_events(self):
+        """Test case for list_all_events
+
+        Get information about all events
+        """
+        headers = {
+            "Accept": "application/json",
+        }
+        response = self.client.open(
+            "/tagbase/api/v0.7.0/events", method="GET", headers=headers
+        )
+        self.assert500(response, "Response body is : " + response.data.decode("utf-8"))
+
+    def test_list_events(self):
+        """Test case for list_events
+
+        Get all events for a given tag submission
+        """
+        headers = {
+            "Accept": "application/json",
+        }
+        response = self.client.open(
+            "/tagbase/api/v0.7.0/tags/{tag_id}/subs/{sub_id}/events".format(
+                tag_id=3.4, sub_id=3.4
+            ),
+            method="GET",
+            headers=headers,
+        )
+        self.assert500(response, "Response body is : " + response.data.decode("utf-8"))
+
+    def test_put_event(self):
+        """Test case for put_event
+
+        Update the 'notes' associated with an event
+        """
+        query_string = [("notes", "notes_example")]
+        headers = {
+            "Accept": "application/json",
+        }
+        response = self.client.open(
+            "/tagbase/api/v0.7.0/events/{event_id}".format(event_id=3.4),
+            method="PUT",
+            headers=headers,
+            query_string=query_string,
+        )
+        self.assert500(response, "Response body is : " + response.data.decode("utf-8"))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tagbase_server/tagbase_server/utils/db_utils.py b/tagbase_server/tagbase_server/utils/db_utils.py
index eb19ae8..8e2e942 100644
--- a/tagbase_server/tagbase_server/utils/db_utils.py
+++ b/tagbase_server/tagbase_server/utils/db_utils.py
@@ -28,7 +28,7 @@ def connect():
             {
                 "code": "500",
                 "message": "Encountered psycopg2.OperationalError when attempting to establish a connection "
-                "to the Tagbase PostgreSQL database.",
+                "to the Tagbase database.",
                 "more_info": "Contact the service administrator - {email}".format(
                     email=os.getenv("PGADMIN_DEFAULT_EMAIL")
                 ),
diff --git a/tagbase_server/tagbase_server/utils/io_utils.py b/tagbase_server/tagbase_server/utils/io_utils.py
index f9e67a1..438c0be 100644
--- a/tagbase_server/tagbase_server/utils/io_utils.py
+++ b/tagbase_server/tagbase_server/utils/io_utils.py
@@ -46,7 +46,6 @@ def process_get_input_data(file):
             f.write(chunk)
 
     data_file = filename
-    logger.info(data_file)
     return data_file
 
 
@@ -68,7 +67,6 @@ def process_post_input_data(filename, body):
     with open(filepath, mode="wb") as f:
         f.write(data)
         f.close()
-    logger.info(filepath)
     return filepath
 
 
diff --git a/tagbase_server/tagbase_server/utils/processing_utils.py b/tagbase_server/tagbase_server/utils/processing_utils.py
index f5bc931..23d026d 100644
--- a/tagbase_server/tagbase_server/utils/processing_utils.py
+++ b/tagbase_server/tagbase_server/utils/processing_utils.py
@@ -7,11 +7,13 @@
 import pandas as pd
 import psycopg2.extras
 import pytz
+
 from slack_sdk import WebClient
 from slack_sdk.errors import SlackApiError
 from tzlocal import get_localzone
 
 from tagbase_server.utils.db_utils import connect
+from tagbase_server.utils.rabbitmq_utils import publish_message
 
 logger = logging.getLogger(__name__)
 slack_token = os.environ.get("SLACK_BOT_TOKEN", "")
@@ -22,6 +24,13 @@
 def process_global_attributes(
     line, cur, submission_id, metadata, submission_filename, line_counter
 ):
+    event_id = uuid.uuid4()
+    global_start = time.perf_counter()
+    # publish_message(
+    #     "event_log/create metadata {} populating-metadata-for-new-tag-submission running {}".format(
+    #         event_id, global_start
+    #     )
+    # )
     logger.debug("Processing global attribute: %s", line)
     tokens = line.strip()[1:].split(" = ")
     logger.debug("Processing token: %s", tokens)
@@ -47,10 +56,27 @@ def process_global_attributes(
         str_submission_id = str(submission_id)
         str_row = str(rows[0][0])
         metadata.append((str_submission_id, str_row, tokens[1]))
+    global_finish = time.perf_counter()
+    global_elapsed = round(global_finish - global_start, 2)
+    # submission_id = cur.fetchone()[0]
+    # publish_message(
+    #     "event_log/update {} {} finished {} {} {}".format(
+    #         global_elapsed, event_id, submission_id, submission_id, global_finish
+    #     )
+    # )
 
 
 def process_etuff_file(file, version=None, notes=None):
+    logger.info("Started processing: %s", file)
+    import uuid
+
+    event_id = uuid.uuid4()
     start = time.perf_counter()
+    publish_message(
+        "events_log/create submission {} new-tag-submission running {}".format(
+            event_id, dt.now(tz=pytz.utc).astimezone(get_localzone())
+        )
+    )
     submission_filename = file  # full path name is now preferred rather than
-        file[file.rindex("/") + 1 :]
     logger.info(
         "Processing etuff file: %s",
@@ -76,13 +102,32 @@ def process_etuff_file(file, version=None, notes=None):
                 "Successful INSERT of '%s' into 'submission' table.",
                 submission_filename,
             )
-
+            sub_finish = time.perf_counter()
+            sub_elapsed = round(sub_finish - start, 2)
             cur.execute("SELECT currval('submission_submission_id_seq')")
             submission_id = cur.fetchone()[0]
+            logger.info("submission_id: %s", submission_id)
+            # cur.execute("SELECT tag_id FROM submission WHERE filename = %s", (submission_filename))
+            # tag_id = cur.fetchone()[0]
+            # logger.info(tag_id)
+            publish_message(
+                "events_log/update {} {} finished {} {} {}".format(
+                    sub_elapsed,
+                    event_id,
+                    submission_id,
+                    submission_id,
+                    dt.now(tz=pytz.utc).astimezone(get_localzone()),
+                )
+            )
             metadata = []
             proc_obs = []
 
             s_time = time.perf_counter()
+            # publish_message(
+            #     "event_log/create submission {} new-tag-submission running {}".format(
+            #         event_id, start
+            #     )
+            # )
             with open(file, "rb") as data:
                 lines = [line.decode("utf-8", "ignore") for line in data.readlines()]
                 variable_lookup = {}
diff --git a/tagbase_server/tagbase_server/utils/rabbitmq_utils.py b/tagbase_server/tagbase_server/utils/rabbitmq_utils.py
new file mode 100644
index 0000000..ce26f6a
--- /dev/null
+++ b/tagbase_server/tagbase_server/utils/rabbitmq_utils.py
@@ -0,0 +1,14 @@
+import logging
+import pika
+import time
+
+logger = logging.getLogger(__name__)
+
+
+def publish_message(message):
+    connection = pika.BlockingConnection(pika.ConnectionParameters(host="rabbitmq"))
+    channel = connection.channel()
+    channel.queue_declare(queue="events_log")
+    channel.basic_publish(exchange="", routing_key="events_log", body=message)
+    logger.info(" [x] Sent: {}".format(message))
+    connection.close()
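
A note for reviewers on the consuming side: the rabbitmq_subscriber container added in docker-compose.yml is built from services/subscriber, which is not included in this diff. The snippet below is therefore only an illustrative sketch of what a consumer of these messages could look like, assuming the same host ("rabbitmq") and queue ("events_log") that publish_message() uses above; the names on_message and consume are hypothetical and not taken from the subscriber code.

import logging

import pika

logger = logging.getLogger(__name__)


def on_message(channel, method, properties, body):
    # Payloads are plain strings such as
    # "events_log/create submission <event_id> new-tag-submission running <timestamp>".
    # Parsing them and persisting event rows is the real subscriber's job.
    logger.info(" [x] Received: %s", body.decode("utf-8"))
    channel.basic_ack(delivery_tag=method.delivery_tag)


def consume(host="rabbitmq", queue="events_log"):
    # Mirror the publisher: declare the queue so either side can start first.
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=host))
    channel = connection.channel()
    channel.queue_declare(queue=queue)
    channel.basic_consume(queue=queue, on_message_callback=on_message)
    channel.start_consuming()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    consume()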
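
One design observation on rabbitmq_utils.publish_message(): it opens a fresh blocking connection per call and lets any pika exception propagate, so an unreachable broker would abort the ingest running inside process_etuff_file(). If that is not intended, a small wrapper could make event publishing best-effort. This is a sketch only, not part of the changeset, and publish_message_safe is a hypothetical name.

import logging

import pika.exceptions

from tagbase_server.utils.rabbitmq_utils import publish_message

logger = logging.getLogger(__name__)


def publish_message_safe(message):
    # Event messages are advisory; never fail an ingest because the broker is down.
    try:
        publish_message(message)
    except pika.exceptions.AMQPError:
        logger.warning("Could not publish event message to RabbitMQ: %s", message)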