diff --git a/.gitallowed b/.gitallowed
index 616b1577..1bb936ee 100644
--- a/.gitallowed
+++ b/.gitallowed
@@ -4,3 +4,7 @@ uec-core-dos-live-db-12-replica-sf.crvqtzolulpo.eu-west-2.rds.amazonaws.com
eyJhbGciOiJSUzI1NiJ9.eyJqdGkiOiJpZCIsImlhdCI6MTYzMjMyMjg1NCwic3ViIjoiYWRtaW5AbmhzLm5ldCIsImlzcyI6Imlzc3VlciIsImV4cCI6MTYzMjMyNjQ1NCwiY29nbml0bzpncm91cHMiOlsiRlVaWllfQVBJX0FDQ0VTUyIsIkFQSV9VU0VSIiwiUE9TVENPREVfQVBJX0FDQ0VTUyJdfQ.AiD4_0DgTq9Osv8Vh7z5SYXayVkQfBTyM_p6_sMQvp9zVy-aOMBhDuL4cZAz44YRYYeF1XP2hVtVAP8joIKis-_hgoMpFk2eDV9k1vCoM_ORsmO5bvtMwhgJr_feJ5El3sn8rj1Op4L-vBityjog_M8GTdX74CB2mk5N8vZMcsURnGFyHRe7Hak-68sWBFKUO9phy61BY2r-4N-tvdX6rEqUXnEWlGLUH0YtHdwdhy_gFP9Dd1ml9XxHauQI_Ycr7-LuYKNQ2P1BpT7SNc80h4mds5epI20nhu8mdJikO7iyfFdIxbQ-i3ZNNgAiVyOmy-hYeXPC-UszUFhu3NVv6g
macos.mk
+
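+# allow the vendored psycopg2 error-code constants below: the names contain PASSWORD/KEY, which trips the secret scanner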
+.*psycopg2/errorcodes\.py:.*INVALID_PASSWORD = '28P01'
+.*psycopg2/errorcodes\.py:.*INVALID_FOREIGN_KEY = '42830'
diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml
index 17c4132f..b537e866 100644
--- a/.github/workflows/code-quality.yml
+++ b/.github/workflows/code-quality.yml
@@ -7,6 +7,9 @@ on:
jobs:
check-code-quality:
runs-on: ubuntu-latest
+ env:
+ TERM: xterm
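+ # the Make targets run in CI call tput (see build/automation/init.mk), which needs a TERM to be set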
steps:
- uses: actions/checkout@v4
with:
diff --git a/.github/workflows/code-security.yml b/.github/workflows/code-security.yml
index 9517af12..cb63ad39 100644
--- a/.github/workflows/code-security.yml
+++ b/.github/workflows/code-security.yml
@@ -3,6 +3,8 @@ on: push
jobs:
check-code-security:
runs-on: ubuntu-latest
+ env:
+ TERM: xterm
steps:
- uses: actions/checkout@v4
with:
diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml
index 57924124..92d750ec 100644
--- a/.github/workflows/pull-request.yml
+++ b/.github/workflows/pull-request.yml
@@ -5,6 +5,8 @@ on:
jobs:
check-pull-request:
runs-on: ubuntu-latest
+ env:
+ TERM: xterm
steps:
- uses: actions/checkout@v4
with:
diff --git a/Makefile b/Makefile
index 67e30a9f..f0745957 100644
--- a/Makefile
+++ b/Makefile
@@ -15,8 +15,8 @@ prepare: ## Prepare environment
pipeline-prepare:
sh $(PROJECT_DIR)scripts/assume_role.sh $(JENKINS_ENV) $(JENKINS_SERVICE_TEAM)
-pipeline-slave-prepare:
- # the jenkins-slave build agents don't have the docker daemon immediately available, let us wait for it
+jenkins-agent-local-prepare:
+ # the jenkins-agent-local build agents don't have the docker daemon immediately available, let us wait for it
timeout 60 bash -c 'while ! docker info &>/dev/null; do sleep 1; done' || exit 1
compile:
@@ -43,10 +43,10 @@ build: project-config
CMD="clean verify install \
-Dsonar.verbose=true \
-Dsonar.host.url='https://sonarcloud.io' \
- -Dsonar.organization='nhsd-exeter' \
- -Dsonar.projectKey='uec-dos-api-pca' \
+ -Dsonar.organization='nhsdigital' \
+ -Dsonar.projectKey='uec-dos-sf-api-pca' \
-Dsonar.projectName='DoS Postcode API' \
- -Dsonar.login='$$(make secret-fetch NAME=service-finder-sonar-pass | jq .SONAR_HOST_TOKEN | tr -d '"' || exit 1)' \
+ -Dsonar.token='$$(make secret-fetch NAME=service-finder-sonar-pass | jq .SONAR_HOST_TOKEN | tr -d '"' || exit 1)' \
-Dsonar.sourceEncoding='UTF-8' \
-Dsonar.coverage.jacoco.xmlReportPaths=target/site/jacoco \
-Dsonar.exclusions='src/main/java/**/config/*.*,src/main/java/**/model/*.*,src/main/java/**/exception/*.*,src/main/java/**/constants/*.*,src/main/java/**/interceptor/*.*,src/test/**/*.*,src/main/java/**/filter/*.*,src/main/java/**/PostcodeMappingApplication.*' \
diff --git a/application/pom.xml b/application/pom.xml
index 944d3200..cda2e81e 100644
--- a/application/pom.xml
+++ b/application/pom.xml
@@ -18,16 +18,14 @@
2.28.2
3.14.0
4.4
- 1.5.2
+    <springdoc-openapi-ui.version>1.8.0</springdoc-openapi-ui.version>
+    <webjars-swagger-ui.version>5.18.2</webjars-swagger-ui.version>
2.16.0
2.6.8
3.19.4
2.15.1
-    <springfox-boot-starter.version>3.0.0</springfox-boot-starter.version>
-    <springfox-swagger2.version>3.0.0</springfox-swagger2.version>
-    <springfox-swagger-ui.version>3.0.0</springfox-swagger-ui.version>
0.8.11
4.6
@@ -215,21 +213,21 @@
-    <dependency>
-      <groupId>io.springfox</groupId>
-      <artifactId>springfox-boot-starter</artifactId>
-      <version>${springfox-boot-starter.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>io.springfox</groupId>
-      <artifactId>springfox-swagger2</artifactId>
-      <version>${springfox-swagger2.version}</version>
-    </dependency>
+    <dependency>
+      <groupId>org.springdoc</groupId>
+      <artifactId>springdoc-openapi-ui</artifactId>
+      <version>${springdoc-openapi-ui.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.webjars</groupId>
+          <artifactId>swagger-ui</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
     <dependency>
-      <groupId>io.springfox</groupId>
-      <artifactId>springfox-swagger-ui</artifactId>
-      <version>${springfox-swagger-ui.version}</version>
+      <groupId>org.webjars</groupId>
+      <artifactId>swagger-ui</artifactId>
+      <version>${webjars-swagger-ui.version}</version>
     </dependency>
diff --git a/application/src/main/java/uk/nhs/digital/uec/api/config/OpenApiConfig.java b/application/src/main/java/uk/nhs/digital/uec/api/config/OpenApiConfig.java
new file mode 100644
index 00000000..b2602825
--- /dev/null
+++ b/application/src/main/java/uk/nhs/digital/uec/api/config/OpenApiConfig.java
@@ -0,0 +1,32 @@
+package uk.nhs.digital.uec.api.config;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import io.swagger.v3.oas.models.OpenAPI;
+import io.swagger.v3.oas.models.info.Info;
+import io.swagger.v3.oas.models.servers.Server;
+
+@Configuration
+public class OpenApiConfig {
+
+ @Value("${api.version}")
+ private String apiVersion;
+
+ @Value("${api.title}")
+ private String title;
+
+ @Value("${api.description}")
+ private String description;
+
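+ // springdoc picks this bean up when generating the OpenAPI document (replaces the springfox Docket from the deleted SwaggerConfig)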
+ @Bean
+ public OpenAPI customOpenAPI() {
+ return new OpenAPI()
+ .info(new Info()
+ .title(title)
+ .version(apiVersion)
+ .description(description))
+ .addServersItem(new Server().url("/"));
+ }
+}
diff --git a/application/src/main/java/uk/nhs/digital/uec/api/config/SwaggerConfig.java b/application/src/main/java/uk/nhs/digital/uec/api/config/SwaggerConfig.java
deleted file mode 100644
index 06a8481c..00000000
--- a/application/src/main/java/uk/nhs/digital/uec/api/config/SwaggerConfig.java
+++ /dev/null
@@ -1,88 +0,0 @@
-package uk.nhs.digital.uec.api.config;
-
-import java.lang.reflect.Field;
-import java.util.List;
-import java.util.stream.Collectors;
-import org.springframework.beans.BeansException;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.beans.factory.config.BeanPostProcessor;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.util.ReflectionUtils;
-import org.springframework.web.bind.annotation.RestController;
-import org.springframework.web.servlet.mvc.method.RequestMappingInfoHandlerMapping;
-import springfox.documentation.builders.ApiInfoBuilder;
-import springfox.documentation.builders.PathSelectors;
-import springfox.documentation.builders.RequestHandlerSelectors;
-import springfox.documentation.service.ApiInfo;
-import springfox.documentation.spi.DocumentationType;
-import springfox.documentation.spring.web.plugins.Docket;
-import springfox.documentation.spring.web.plugins.WebFluxRequestHandlerProvider;
-import springfox.documentation.spring.web.plugins.WebMvcRequestHandlerProvider;
-import springfox.documentation.swagger2.annotations.EnableSwagger2;
-
-@Configuration
-@EnableSwagger2
-public class SwaggerConfig {
-
- @Value("${api.version}")
- private String apiVersion;
-
- @Value("${api.title}")
- private String title;
-
- @Value("${api.description}")
- private String description;
-
- @Bean
- public Docket api() {
- return new Docket(DocumentationType.SWAGGER_2)
- .select()
- .apis(RequestHandlerSelectors.withClassAnnotation(RestController.class))
- .apis(RequestHandlerSelectors.any())
- .paths(PathSelectors.any())
- .build()
- .apiInfo(apiInfo());
- }
-
- ApiInfo apiInfo() {
- return new ApiInfoBuilder().title(title).description(description).version(apiVersion).build();
- }
-
- @Bean
- public static BeanPostProcessor springfoxHandlerProviderBeanPostProcessor() {
- return new BeanPostProcessor() {
-
- @Override
- public Object postProcessAfterInitialization(Object bean, String beanName)
- throws BeansException {
- if (bean instanceof WebMvcRequestHandlerProvider
- || bean instanceof WebFluxRequestHandlerProvider) {
- customizeSpringfoxHandlerMappings(getHandlerMappings(bean));
- }
- return bean;
- }
-
-  private <T extends RequestMappingInfoHandlerMapping> void customizeSpringfoxHandlerMappings(
-      List<T> mappings) {
-    List<T> copy =
- mappings.stream()
- .filter(mapping -> mapping.getPatternParser() == null)
- .collect(Collectors.toList());
- mappings.clear();
- mappings.addAll(copy);
- }
-
- @SuppressWarnings("unchecked")
-  private List<RequestMappingInfoHandlerMapping> getHandlerMappings(Object bean) {
- try {
- Field field = ReflectionUtils.findField(bean.getClass(), "handlerMappings");
- field.setAccessible(true);
-      return (List<RequestMappingInfoHandlerMapping>) field.get(bean);
- } catch (IllegalArgumentException | IllegalAccessException e) {
- throw new IllegalStateException(e);
- }
- }
- };
- }
-}
diff --git a/application/src/main/java/uk/nhs/digital/uec/api/controller/PostcodeMappingController.java b/application/src/main/java/uk/nhs/digital/uec/api/controller/PostcodeMappingController.java
index 7186877b..67ae0795 100644
--- a/application/src/main/java/uk/nhs/digital/uec/api/controller/PostcodeMappingController.java
+++ b/application/src/main/java/uk/nhs/digital/uec/api/controller/PostcodeMappingController.java
@@ -4,9 +4,9 @@
import static uk.nhs.digital.uec.api.constants.SwaggerConstants.POSTCODES_DESC;
import static uk.nhs.digital.uec.api.exception.ErrorMessageEnum.NO_PARAMS_PROVIDED;
-import io.swagger.annotations.ApiParam;
import java.util.List;
+import io.swagger.v3.oas.annotations.Parameter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
@@ -38,9 +38,9 @@ public class PostcodeMappingController {
@GetMapping()
public ResponseEntity<List<PostcodeMapping>> getPostcodeMapping(
- @ApiParam(POSTCODES_DESC) @RequestParam(name = "postcodes", required = false)
+ @Parameter(description = POSTCODES_DESC) @RequestParam(name = "postcodes", required = false)
List<String> postCodes,
- @ApiParam(NAME_DESC) @RequestParam(name = "name", required = false) String name)
+ @Parameter(description = NAME_DESC) @RequestParam(name = "name", required = false) String name)
throws InvalidPostcodeException, InvalidParameterException, NotFoundException {
long start = System.currentTimeMillis();
List<PostcodeMapping> postcodeMapping = null;
diff --git a/application/src/main/java/uk/nhs/digital/uec/api/controller/RegionController.java b/application/src/main/java/uk/nhs/digital/uec/api/controller/RegionController.java
index c0386cad..cea656f5 100644
--- a/application/src/main/java/uk/nhs/digital/uec/api/controller/RegionController.java
+++ b/application/src/main/java/uk/nhs/digital/uec/api/controller/RegionController.java
@@ -3,9 +3,10 @@
import static uk.nhs.digital.uec.api.constants.SwaggerConstants.POSTCODES_DESC;
import static uk.nhs.digital.uec.api.constants.SwaggerConstants.POSTCODE_DESC;
-import io.swagger.annotations.ApiParam;
import java.util.List;
import java.util.Map;
+
+import io.swagger.v3.oas.annotations.Parameter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
@@ -42,7 +43,7 @@ public ResponseEntity getAllRegions() {
@GetMapping(params = {"postcodes"})
@PreAuthorize("hasAnyRole('POSTCODE_API_ACCESS')")
public ResponseEntity getRegionDetailsByPostCodes(
- @ApiParam(POSTCODES_DESC) @RequestParam(name = "postcodes", required = false)
+ @Parameter(description = POSTCODES_DESC) @RequestParam(name = "postcodes", required = false)
List<String> postcodes) {
try {
long start = System.currentTimeMillis();
@@ -75,7 +76,7 @@ public ResponseEntity getRegionDetailsByPostCodes(
@GetMapping(params = {"postcode"})
@PreAuthorize("hasAnyRole('POSTCODE_API_ACCESS')")
public ResponseEntity getRegionDetailsByPostCode(
- @ApiParam(POSTCODE_DESC) @RequestParam(name = "postcode", required = false) String postcode) {
+ @Parameter(description = POSTCODE_DESC) @RequestParam(name = "postcode", required = false) String postcode) {
PostcodeMapping postcodeMapping = new PostcodeMapping();
postcodeMapping.setPostcode(postcode);
try {
diff --git a/build/automation/init.mk b/build/automation/init.mk
index 8b8cb834..dd4dc2a5 100644
--- a/build/automation/init.mk
+++ b/build/automation/init.mk
@@ -792,10 +792,11 @@ endif
ifneq (0, $(shell which docker > /dev/null 2>&1; echo $$?))
$(error $(shell tput setaf 202; echo "WARNING: Please, before proceeding install Docker"; tput sgr0))
endif
-# *NIX: Docker Compose
-ifneq (0, $(shell which docker-compose > /dev/null 2>&1; echo $$?))
-$(error $(shell tput setaf 202; echo "WARNING: Please, before proceeding install Docker Compose"; tput sgr0))
-endif
+## *NIX: Docker Compose
+#ifneq (0, $(shell which docker-compose > /dev/null 2>&1; echo $$?))
+#$(error $(shell tput setaf 202; echo "WARNING: Please, before proceeding install Docker Compose"; tput sgr0))
+#endif
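+## Assumption: Docker Compose v2 ships as the "docker compose" CLI plugin, so the standalone docker-compose check above is redundant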
endif
endif
diff --git a/build/automation/lib/docker.mk b/build/automation/lib/docker.mk
index 6c7fac50..b24e5924 100644
--- a/build/automation/lib/docker.mk
+++ b/build/automation/lib/docker.mk
@@ -408,9 +408,10 @@ docker-run-composer: ### Run composer container - mandatory: CMD; optional: DIR,
docker-run-editorconfig: ### Run editorconfig container - optional: DIR=[working directory],EXCLUDE=[file pattern e.g. '\.txt$$'],ARGS=[Docker args],VARS_FILE=[Makefile vars file],IMAGE=[image name],CONTAINER=[container name]
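+	# note: the vendored psycopg2 sources under infrastructure/stacks are third-party code, hence excluded below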
if [ $(PROJECT_NAME) = $(DEVOPS_PROJECT_NAME) ]; then
- exclude='$(shell [ -n "$(EXCLUDE)" ] && echo '$(EXCLUDE)|')markdown|linux-amd64$$|\.drawio|\.p12|\.jks|\.so$$'
+ exclude='$(shell [ -n "$(EXCLUDE)" ] && echo '$(EXCLUDE)|')markdown|linux-amd64$$|\.drawio|\.p12|\.jks|\.so$$|infrastructure/stacks/.*/.*/.*/psycopg2.*'
else
- exclude='$(shell [ -n "$(EXCLUDE)" ] && echo '$(EXCLUDE)|')build/automation|markdown|linux-amd64$$|\.drawio|\.p12|\.jks|\.so$$'
+ exclude='$(shell [ -n "$(EXCLUDE)" ] && echo '$(EXCLUDE)|')build/automation|markdown|linux-amd64$$|\.drawio|\.p12|\.jks|\.so$$|infrastructure/stacks/.*/.*/.*/psycopg2.*'
fi
make docker-config > /dev/null 2>&1
image=$$([ -n "$(IMAGE)" ] && echo $(IMAGE) || echo mstruebing/editorconfig-checker:$(DOCKER_EDITORCONFIG_CHECKER_VERSION))
diff --git a/build/jenkins/Jenkinsfile.tag b/build/jenkins/Jenkinsfile.tag
index f86c7dd5..bb3a18dd 100644
--- a/build/jenkins/Jenkinsfile.tag
+++ b/build/jenkins/Jenkinsfile.tag
@@ -3,7 +3,7 @@ pipeline {
Tags the release image
*/
- agent { label "jenkins-slave" }
+ agent { label "jenkins-agent-local" }
parameters {
string(
@@ -24,10 +24,10 @@ pipeline {
}
stages {
- stage('Prepare for jenkins-slave run') {
+ stage('Prepare for jenkins-agent-local run') {
steps {
script {
- sh "make pipeline-slave-prepare"
+ sh "make jenkins-agent-local-prepare"
}
}
}
diff --git a/build/jenkins/build/build.Jenkinsfile b/build/jenkins/build/build.Jenkinsfile
index 7e544f9d..fa2c3ab4 100644
--- a/build/jenkins/build/build.Jenkinsfile
+++ b/build/jenkins/build/build.Jenkinsfile
@@ -3,7 +3,7 @@ pipeline {
Description: Development pipeline to build test push and deploy to nonprod
*/
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
environment {
@@ -20,10 +20,10 @@ pipeline {
triggers { pollSCM('* * * * *') }
stages {
- stage('Prepare for jenkins-slave run') {
+ stage('Prepare for jenkins-agent-local run') {
steps {
script {
- sh "make pipeline-slave-prepare"
+ sh "make jenkins-agent-local-prepare"
}
}
}
diff --git a/build/jenkins/demo/demo-deployment.Jenkinsfile b/build/jenkins/demo/demo-deployment.Jenkinsfile
index b2f97986..3c0500bc 100644
--- a/build/jenkins/demo/demo-deployment.Jenkinsfile
+++ b/build/jenkins/demo/demo-deployment.Jenkinsfile
@@ -4,7 +4,7 @@ pipeline {
*/
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
options {
@@ -37,10 +37,10 @@ pipeline {
}
}
}
- stage('Prepare for jenkins-slave run') {
+ stage('Prepare for jenkins-agent-local run') {
steps {
script {
- sh "make pipeline-slave-prepare"
+ sh "make jenkins-agent-local-prepare"
}
}
}
diff --git a/build/jenkins/deployments.Jenkinsfile b/build/jenkins/deployments.Jenkinsfile
index 96f4e076..a258f674 100644
--- a/build/jenkins/deployments.Jenkinsfile
+++ b/build/jenkins/deployments.Jenkinsfile
@@ -38,10 +38,10 @@ pipeline {
}
}
}
- stage('Prepare for jenkins-slave run') {
+ stage('Prepare for jenkins-agent-local run') {
steps {
script {
- sh 'make pipeline-slave-prepare'
+ sh 'make jenkins-agent-local-prepare'
}
}
}
diff --git a/build/jenkins/perf/Performance_deploy.Jenkinsfile b/build/jenkins/perf/Performance_deploy.Jenkinsfile
index 1e67af7b..56e1cb25 100644
--- a/build/jenkins/perf/Performance_deploy.Jenkinsfile
+++ b/build/jenkins/perf/Performance_deploy.Jenkinsfile
@@ -3,7 +3,7 @@ pipeline {
Description: Deployment pipeline
*/
- agent { label 'jenkins-slave' }
+ agent { label 'jenkins-agent-local' }
options {
buildDiscarder(logRotator(daysToKeepStr: '7', numToKeepStr: '13'))
@@ -35,10 +35,10 @@ pipeline {
}
}
}
- stage('Prepare for jenkins-slave run') {
+ stage('Prepare for jenkins-agent-local run') {
steps {
script {
- sh "make pipeline-slave-prepare"
+ sh "make jenkins-agent-local-prepare"
}
}
}
@@ -122,7 +122,7 @@ pipeline {
}
stage('Nominal Test') {
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
steps {
script {
@@ -137,7 +137,7 @@ pipeline {
}
stage('Peak Test') {
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
steps {
script {
@@ -152,7 +152,7 @@ pipeline {
}
stage('Double Peak Test') {
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
steps {
script {
@@ -167,7 +167,7 @@ pipeline {
}
stage('Burst Nominal Test') {
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
steps {
script {
@@ -182,7 +182,7 @@ pipeline {
}
stage('Burst Peak Test') {
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
steps {
script {
@@ -197,7 +197,7 @@ pipeline {
}
stage('Burst Double Peak Test') {
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
steps {
script {
diff --git a/build/jenkins/prod/Jenkinsfile-prod.deployment b/build/jenkins/prod/Jenkinsfile-prod.deployment
index b23d5964..335e0d14 100644
--- a/build/jenkins/prod/Jenkinsfile-prod.deployment
+++ b/build/jenkins/prod/Jenkinsfile-prod.deployment
@@ -4,7 +4,7 @@ pipeline {
*/
agent {
- label 'jenkins-slave'
+ label 'jenkins-agent-local'
}
options {
@@ -37,10 +37,10 @@ parameters {
}
}
}
- stage('Prepare for jenkins-slave run') {
+ stage('Prepare for jenkins-agent-local run') {
steps {
script {
- sh "make pipeline-slave-prepare"
+ sh "make jenkins-agent-local-prepare"
}
}
}
diff --git a/build/jenkins/staging/Staging_deploy.Jenkinsfile b/build/jenkins/staging/Staging_deploy.Jenkinsfile
index d03b9645..3bf87829 100644
--- a/build/jenkins/staging/Staging_deploy.Jenkinsfile
+++ b/build/jenkins/staging/Staging_deploy.Jenkinsfile
@@ -2,7 +2,7 @@ pipeline {
/*
Description: Deployment pipeline
*/
- agent { label 'jenkins-slave' }
+ agent { label 'jenkins-agent-local' }
options {
buildDiscarder(logRotator(daysToKeepStr: '7', numToKeepStr: '13'))
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/__init__.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/__init__.py
index 5198b73c..59a89386 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/__init__.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/__init__.py
@@ -12,14 +12,14 @@
.. _Python: https://www.python.org/
:Groups:
-* `Connections creation`: connect
-* `Value objects constructors`: Binary, Date, DateFromTicks, Time,
+ * `Connections creation`: connect
+ * `Value objects constructors`: Binary, Date, DateFromTicks, Time,
TimeFromTicks, Timestamp, TimestampFromTicks
"""
# psycopg/__init__.py - initialization of the psycopg module
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -48,53 +48,31 @@
# Import the DBAPI-2.0 stuff into top-level module.
-from psycopg2._psycopg import ( # noqa
- BINARY,
- NUMBER,
- STRING,
- DATETIME,
- ROWID,
- Binary,
- Date,
- Time,
- Timestamp,
- DateFromTicks,
- TimeFromTicks,
- TimestampFromTicks,
- Error,
- Warning,
- DataError,
- DatabaseError,
- ProgrammingError,
- IntegrityError,
- InterfaceError,
- InternalError,
- NotSupportedError,
- OperationalError,
- _connect,
- apilevel,
- threadsafety,
- paramstyle,
- __version__,
- __libpq_version__,
-)
-
-from psycopg2 import tz  # noqa
+from psycopg2._psycopg import (  # noqa
+    BINARY, NUMBER, STRING, DATETIME, ROWID,
+
+    Binary, Date, Time, Timestamp,
+    DateFromTicks, TimeFromTicks, TimestampFromTicks,
+
+    Error, Warning, DataError, DatabaseError, ProgrammingError, IntegrityError,
+    InterfaceError, InternalError, NotSupportedError, OperationalError,
+
+    _connect, apilevel, threadsafety, paramstyle,
+    __version__, __libpq_version__,
+)
# Register default adapters.
from psycopg2 import extensions as _ext
-
_ext.register_adapter(tuple, _ext.SQL_IN)
_ext.register_adapter(type(None), _ext.NoneAdapter)
# Register the Decimal adapter here instead of in the C layer.
# This way a new class is registered for each sub-interpreter.
# See ticket #52
-from decimal import Decimal # noqa
-from psycopg2._psycopg import Decimal as Adapter # noqa
-
+from decimal import Decimal # noqa
+from psycopg2._psycopg import Decimal as Adapter # noqa
_ext.register_adapter(Decimal, Adapter)
del Decimal, Adapter
@@ -135,13 +113,10 @@ def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
"""
kwasync = {}
- if "async" in kwargs:
- kwasync["async"] = kwargs.pop("async")
- if "async_" in kwargs:
- kwasync["async_"] = kwargs.pop("async_")
-
- if dsn is None and not kwargs:
- raise TypeError("missing dsn and no parameters")
+ if 'async' in kwargs:
+ kwasync['async'] = kwargs.pop('async')
+ if 'async_' in kwargs:
+ kwasync['async_'] = kwargs.pop('async_')
dsn = _ext.make_dsn(dsn, **kwargs)
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_ipaddress.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_ipaddress.py
index 994cf9e8..d38566c8 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_ipaddress.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_ipaddress.py
@@ -4,7 +4,7 @@
# psycopg/_ipaddress.py - Ipaddres-based network types adaptation
#
# Copyright (C) 2016-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -24,8 +24,8 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from psycopg2.extensions import new_type, new_array_type, register_type, register_adapter, QuotedString
-from psycopg2.compat import text_type
+from psycopg2.extensions import (
+ new_type, new_array_type, register_type, register_adapter, QuotedString)
# The module is imported on register_ipaddress
ipaddress = None
@@ -58,16 +58,17 @@ def register_ipaddress(conn_or_curs=None):
for c in _casters:
register_type(c, conn_or_curs)
- for t in [ipaddress.IPv4Interface, ipaddress.IPv6Interface, ipaddress.IPv4Network, ipaddress.IPv6Network]:
+ for t in [ipaddress.IPv4Interface, ipaddress.IPv6Interface,
+ ipaddress.IPv4Network, ipaddress.IPv6Network]:
register_adapter(t, adapt_ipaddress)
def _make_casters():
- inet = new_type((869,), "INET", cast_interface)
- ainet = new_array_type((1041,), "INET[]", inet)
+ inet = new_type((869,), 'INET', cast_interface)
+ ainet = new_array_type((1041,), 'INET[]', inet)
- cidr = new_type((650,), "CIDR", cast_network)
- acidr = new_array_type((651,), "CIDR[]", cidr)
+ cidr = new_type((650,), 'CIDR', cast_network)
+ acidr = new_array_type((651,), 'CIDR[]', cidr)
return [inet, ainet, cidr, acidr]
@@ -76,13 +77,13 @@ def cast_interface(s, cur=None):
if s is None:
return None
# Py2 version force the use of unicode. meh.
- return ipaddress.ip_interface(text_type(s))
+ return ipaddress.ip_interface(str(s))
def cast_network(s, cur=None):
if s is None:
return None
- return ipaddress.ip_network(text_type(s))
+ return ipaddress.ip_network(str(s))
def adapt_ipaddress(obj):
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_json.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_json.py
index a599baf9..95024223 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_json.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_json.py
@@ -8,7 +8,7 @@
# psycopg/_json.py - Implementation of the JSON adaptation objects
#
# Copyright (C) 2012-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -32,7 +32,6 @@
from psycopg2._psycopg import ISQLQuote, QuotedString
from psycopg2._psycopg import new_type, new_array_type, register_type
-from psycopg2.compat import PY2
# oids from PostgreSQL 9.2
@@ -44,7 +43,7 @@
JSONBARRAY_OID = 3807
-class Json(object):
+class Json:
"""
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
:sql:`json` data type.
@@ -54,7 +53,6 @@ class Json(object):
used.
"""
-
def __init__(self, adapted, dumps=None):
self.adapted = adapted
self._conn = None
@@ -83,19 +81,13 @@ def getquoted(self):
qs.prepare(self._conn)
return qs.getquoted()
- if PY2:
-
- def __str__(self):
- return self.getquoted()
-
- else:
-
- def __str__(self):
- # getquoted is binary in Py3
- return self.getquoted().decode("ascii", "replace")
+ def __str__(self):
+ # getquoted is binary
+ return self.getquoted().decode('ascii', 'replace')
-def register_json(conn_or_curs=None, globally=False, loads=None, oid=None, array_oid=None, name="json"):
+def register_json(conn_or_curs=None, globally=False, loads=None,
+ oid=None, array_oid=None, name='json'):
"""Create and register typecasters converting :sql:`json` type to Python objects.
:param conn_or_curs: a connection or cursor used to find the :sql:`json`
@@ -122,7 +114,8 @@ def register_json(conn_or_curs=None, globally=False, loads=None, oid=None, array
if oid is None:
oid, array_oid = _get_json_oids(conn_or_curs, name)
- JSON, JSONARRAY = _create_json_typecasters(oid, array_oid, loads=loads, name=name.upper())
+ JSON, JSONARRAY = _create_json_typecasters(
+ oid, array_oid, loads=loads, name=name.upper())
register_type(JSON, not globally and conn_or_curs or None)
@@ -141,9 +134,8 @@ def register_default_json(conn_or_curs=None, globally=False, loads=None):
for the default :sql:`json` type without querying the database.
All the parameters have the same meaning of `register_json()`.
"""
- return register_json(
- conn_or_curs=conn_or_curs, globally=globally, loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID
- )
+ return register_json(conn_or_curs=conn_or_curs, globally=globally,
+ loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID)
def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
@@ -155,12 +147,11 @@ def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
PostgreSQL 9.4 and following versions. All the parameters have the same
meaning of `register_json()`.
"""
- return register_json(
- conn_or_curs=conn_or_curs, globally=globally, loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name="jsonb"
- )
+ return register_json(conn_or_curs=conn_or_curs, globally=globally,
+ loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name='jsonb')
-def _create_json_typecasters(oid, array_oid, loads=None, name="JSON"):
+def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
"""Create typecasters for json data type."""
if loads is None:
loads = json.loads
@@ -170,16 +161,16 @@ def typecast_json(s, cur):
return None
return loads(s)
- JSON = new_type((oid,), name, typecast_json)
+ JSON = new_type((oid, ), name, typecast_json)
if array_oid is not None:
- JSONARRAY = new_array_type((array_oid,), "%sARRAY" % name, JSON)
+ JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON)
else:
JSONARRAY = None
return JSON, JSONARRAY
-def _get_json_oids(conn_or_curs, name="json"):
+def _get_json_oids(conn_or_curs, name='json'):
# lazy imports
from psycopg2.extensions import STATUS_IN_TRANSACTION
from psycopg2.extras import _solve_conn_curs
@@ -193,7 +184,9 @@ def _get_json_oids(conn_or_curs, name="json"):
typarray = conn.info.server_version >= 80300 and "typarray" or "NULL"
# get the oid for the hstore
- curs.execute("SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;" % typarray, (name,))
+ curs.execute(
+ "SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;"
+ % typarray, (name,))
r = curs.fetchone()
# revert the status of the connection as before the command
@@ -201,6 +194,6 @@ def _get_json_oids(conn_or_curs, name="json"):
conn.rollback()
if not r:
- raise conn.ProgrammingError("%s data type not found" % name)
+ raise conn.ProgrammingError(f"{name} data type not found")
return r
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_lru_cache.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_lru_cache.py
deleted file mode 100644
index 1e2c52d0..00000000
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_lru_cache.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-LRU cache implementation for Python 2.7
-
-Ported from http://code.activestate.com/recipes/578078/ and simplified for our
-use (only support maxsize > 0 and positional arguments).
-"""
-
-from collections import namedtuple
-from functools import update_wrapper
-from threading import RLock
-
-_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
-
-
-def lru_cache(maxsize=100):
- """Least-recently-used cache decorator.
-
- Arguments to the cached function must be hashable.
-
- See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
-
- """
- def decorating_function(user_function):
-
- cache = dict()
- stats = [0, 0] # make statistics updateable non-locally
- HITS, MISSES = 0, 1 # names for the stats fields
- cache_get = cache.get # bound method to lookup key or return None
- _len = len # localize the global len() function
- lock = RLock() # linkedlist updates aren't threadsafe
- root = [] # root of the circular doubly linked list
- root[:] = [root, root, None, None] # initialize by pointing to self
- nonlocal_root = [root] # make updateable non-locally
- PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
-
- assert maxsize and maxsize > 0, "maxsize %s not supported" % maxsize
-
- def wrapper(*args):
- # size limited caching that tracks accesses by recency
- key = args
- with lock:
- link = cache_get(key)
- if link is not None:
- # record recent use of the key by moving it to the
- # front of the list
- root, = nonlocal_root
- link_prev, link_next, key, result = link
- link_prev[NEXT] = link_next
- link_next[PREV] = link_prev
- last = root[PREV]
- last[NEXT] = root[PREV] = link
- link[PREV] = last
- link[NEXT] = root
- stats[HITS] += 1
- return result
- result = user_function(*args)
- with lock:
- root, = nonlocal_root
- if key in cache:
- # getting here means that this same key was added to the
- # cache while the lock was released. since the link
- # update is already done, we need only return the
- # computed result and update the count of misses.
- pass
- elif _len(cache) >= maxsize:
- # use the old root to store the new key and result
- oldroot = root
- oldroot[KEY] = key
- oldroot[RESULT] = result
- # empty the oldest link and make it the new root
- root = nonlocal_root[0] = oldroot[NEXT]
- oldkey = root[KEY]
- # oldvalue = root[RESULT]
- root[KEY] = root[RESULT] = None
- # now update the cache dictionary for the new links
- del cache[oldkey]
- cache[key] = oldroot
- else:
- # put result in a new link at the front of the list
- last = root[PREV]
- link = [last, root, key, result]
- last[NEXT] = root[PREV] = cache[key] = link
- stats[MISSES] += 1
- return result
-
- def cache_info():
- """Report cache statistics"""
- with lock:
- return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
-
- def cache_clear():
- """Clear the cache and cache statistics"""
- with lock:
- cache.clear()
- root = nonlocal_root[0]
- root[:] = [root, root, None, None]
- stats[:] = [0, 0]
-
- wrapper.__wrapped__ = user_function
- wrapper.cache_info = cache_info
- wrapper.cache_clear = cache_clear
- return update_wrapper(wrapper, user_function)
-
- return decorating_function
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_psycopg.cpython-38-x86_64-linux-gnu.so b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_psycopg.cpython-38-x86_64-linux-gnu.so
deleted file mode 100755
index eaf44a4e..00000000
Binary files a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_psycopg.cpython-38-x86_64-linux-gnu.so and /dev/null differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_psycopg.so b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_psycopg.so
new file mode 100644
index 00000000..5421979d
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_psycopg.so differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_range.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_range.py
index ea265156..64bae073 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_range.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/_range.py
@@ -5,7 +5,7 @@
# psycopg/_range.py - Implementation of the Range type and adaptation
#
# Copyright (C) 2012-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -30,10 +30,9 @@
from psycopg2._psycopg import ProgrammingError, InterfaceError
from psycopg2.extensions import ISQLQuote, adapt, register_adapter
from psycopg2.extensions import new_type, new_array_type, register_type
-from psycopg2.compat import string_types
-class Range(object):
+class Range:
"""Python representation for a PostgreSQL |range|_ type.
:param lower: lower bound for the range. `!None` means unbound
@@ -48,7 +47,7 @@ class Range(object):
def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
if not empty:
if bounds not in ('[)', '(]', '()', '[]'):
- raise ValueError("bound flags not valid: %r" % bounds)
+ raise ValueError(f"bound flags not valid: {bounds!r}")
self._lower = lower
self._upper = upper
@@ -58,9 +57,9 @@ def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
def __repr__(self):
if self._bounds is None:
- return "%s(empty=True)" % self.__class__.__name__
+ return f"{self.__class__.__name__}(empty=True)"
else:
- return "%s(%r, %r, %r)" % (self.__class__.__name__,
+ return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
self._lower, self._upper, self._bounds)
def __str__(self):
@@ -144,10 +143,6 @@ def __contains__(self, x):
def __bool__(self):
return self._bounds is not None
- def __nonzero__(self):
- # Python 2 compatibility
- return type(self).__bool__(self)
-
def __eq__(self, other):
if not isinstance(other, Range):
return False
@@ -239,7 +234,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
return caster
-class RangeAdapter(object):
+class RangeAdapter:
"""`ISQLQuote` adapter for `Range` subclasses.
This is an abstract class: concrete classes must set a `name` class
@@ -287,7 +282,7 @@ def getquoted(self):
+ b", '" + r._bounds.encode('utf8') + b"')"
-class RangeCaster(object):
+class RangeCaster:
"""Helper class to convert between `Range` and PostgreSQL range types.
Objects of this class are usually created by `register_range()`. Manual
@@ -315,7 +310,7 @@ def _create_ranges(self, pgrange, pyrange):
# an implementation detail and is not documented. It is currently used
# for the numeric ranges.
self.adapter = None
- if isinstance(pgrange, string_types):
+ if isinstance(pgrange, str):
self.adapter = type(pgrange, (RangeAdapter,), {})
self.adapter.name = pgrange
else:
@@ -332,7 +327,7 @@ def _create_ranges(self, pgrange, pyrange):
self.range = None
try:
- if isinstance(pyrange, string_types):
+ if isinstance(pyrange, str):
self.range = type(pyrange, (Range,), {})
if issubclass(pyrange, Range) and pyrange is not Range:
self.range = pyrange
@@ -368,33 +363,54 @@ def _from_db(self, name, pyrange, conn_or_curs):
schema = 'public'
# get the type oid and attributes
- try:
- curs.execute("""\
-select rngtypid, rngsubtype,
- (select typarray from pg_type where oid = rngtypid)
+ curs.execute("""\
+select rngtypid, rngsubtype, typarray
from pg_range r
join pg_type t on t.oid = rngtypid
join pg_namespace ns on ns.oid = typnamespace
where typname = %s and ns.nspname = %s;
""", (tname, schema))
+ rec = curs.fetchone()
- except ProgrammingError:
- if not conn.autocommit:
- conn.rollback()
- raise
- else:
- rec = curs.fetchone()
+ if not rec:
+ # The above algorithm doesn't work for customized seach_path
+ # (#1487) The implementation below works better, but, to guarantee
+ # backwards compatibility, use it only if the original one failed.
+ try:
+ savepoint = False
+ # Because we executed statements earlier, we are either INTRANS
+ # or we are IDLE only if the transaction is autocommit, in
+ # which case we don't need the savepoint anyway.
+ if conn.status == STATUS_IN_TRANSACTION:
+ curs.execute("SAVEPOINT register_type")
+ savepoint = True
+
+ curs.execute("""\
+SELECT rngtypid, rngsubtype, typarray, typname, nspname
+from pg_range r
+join pg_type t on t.oid = rngtypid
+join pg_namespace ns on ns.oid = typnamespace
+WHERE t.oid = %s::regtype
+""", (name, ))
+ except ProgrammingError:
+ pass
+ else:
+ rec = curs.fetchone()
+ if rec:
+ tname, schema = rec[3:]
+ finally:
+ if savepoint:
+ curs.execute("ROLLBACK TO SAVEPOINT register_type")
- # revert the status of the connection as before the command
- if (conn_status != STATUS_IN_TRANSACTION
- and not conn.autocommit):
- conn.rollback()
+ # revert the status of the connection as before the command
+ if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
+ conn.rollback()
if not rec:
raise ProgrammingError(
- "PostgreSQL type '%s' not found" % name)
+ f"PostgreSQL range '{name}' not found")
- type, subtype, array = rec
+ type, subtype, array = rec[:3]
return RangeCaster(name, pyrange,
oid=type, subtype_oid=subtype, array_oid=array)
@@ -402,13 +418,13 @@ def _from_db(self, name, pyrange, conn_or_curs):
_re_range = re.compile(r"""
( \(|\[ ) # lower bound flag
(?: # lower bound:
- " ( (?: [^"] | "")* ) " # - a quoted string
- | ( [^",]+ ) # - or an unquoted string
+ " ( (?: [^"] | "")* ) " # - a quoted string
+ | ( [^",]+ ) # - or an unquoted string
)? # - or empty (not catched)
,
(?: # upper bound:
- " ( (?: [^"] | "")* ) " # - a quoted string
- | ( [^"\)\]]+ ) # - or an unquoted string
+ " ( (?: [^"] | "")* ) " # - a quoted string
+ | ( [^"\)\]]+ ) # - or an unquoted string
)? # - or empty (not catched)
( \)|\] ) # upper bound flag
""", re.VERBOSE)
@@ -424,7 +440,7 @@ def parse(self, s, cur=None):
m = self._re_range.match(s)
if m is None:
- raise InterfaceError("failed to parse range: '%s'" % s)
+ raise InterfaceError(f"failed to parse range: '{s}'")
lower = m.group(3)
if lower is None:
@@ -504,8 +520,7 @@ def getquoted(self):
else:
upper = ''
- return ("'%s%s,%s%s'" % (
- r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
+ return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii')
# TODO: probably won't work with infs, nans and other tricky cases.
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/compat.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/compat.py
deleted file mode 100644
index 54606a80..00000000
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/compat.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import sys
-
-__all__ = ['string_types', 'text_type', 'lru_cache']
-
-if sys.version_info[0] == 2:
- # Python 2
- PY2 = True
- PY3 = False
- string_types = basestring,
- text_type = unicode
- from ._lru_cache import lru_cache
-
-else:
- # Python 3
- PY2 = False
- PY3 = True
- string_types = str,
- text_type = str
- from functools import lru_cache
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errorcodes.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errorcodes.py
index 9baceec0..aa646c46 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errorcodes.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errorcodes.py
@@ -1,11 +1,11 @@
-"""Error codes for PostgresSQL
+"""Error codes for PostgreSQL
This module contains symbolic names for all PostgreSQL error codes.
"""
# psycopg2/errorcodes.py - PostgreSQL error codes
#
# Copyright (C) 2006-2019 Johan Dahlin
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -43,7 +43,8 @@ def lookup(code, _cache={}):
tmp = {}
for k, v in globals().items():
if isinstance(v, str) and len(v) in (2, 5):
- tmp[v] = k
+ # Strip trailing underscore used to disambiguate duplicate values
+ tmp[v] = k.rstrip("_")
assert tmp
@@ -56,390 +57,393 @@ def lookup(code, _cache={}):
# autogenerated data: do not edit below this point.
# Error classes
-CLASS_SUCCESSFUL_COMPLETION = "00"
-CLASS_WARNING = "01"
-CLASS_NO_DATA = "02"
-CLASS_SQL_STATEMENT_NOT_YET_COMPLETE = "03"
-CLASS_CONNECTION_EXCEPTION = "08"
-CLASS_TRIGGERED_ACTION_EXCEPTION = "09"
-CLASS_FEATURE_NOT_SUPPORTED = "0A"
-CLASS_INVALID_TRANSACTION_INITIATION = "0B"
-CLASS_LOCATOR_EXCEPTION = "0F"
-CLASS_INVALID_GRANTOR = "0L"
-CLASS_INVALID_ROLE_SPECIFICATION = "0P"
-CLASS_DIAGNOSTICS_EXCEPTION = "0Z"
-CLASS_CASE_NOT_FOUND = "20"
-CLASS_CARDINALITY_VIOLATION = "21"
-CLASS_DATA_EXCEPTION = "22"
-CLASS_INTEGRITY_CONSTRAINT_VIOLATION = "23"
-CLASS_INVALID_CURSOR_STATE = "24"
-CLASS_INVALID_TRANSACTION_STATE = "25"
-CLASS_INVALID_SQL_STATEMENT_NAME = "26"
-CLASS_TRIGGERED_DATA_CHANGE_VIOLATION = "27"
-CLASS_INVALID_AUTHORIZATION_SPECIFICATION = "28"
-CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = "2B"
-CLASS_INVALID_TRANSACTION_TERMINATION = "2D"
-CLASS_SQL_ROUTINE_EXCEPTION = "2F"
-CLASS_INVALID_CURSOR_NAME = "34"
-CLASS_EXTERNAL_ROUTINE_EXCEPTION = "38"
-CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = "39"
-CLASS_SAVEPOINT_EXCEPTION = "3B"
-CLASS_INVALID_CATALOG_NAME = "3D"
-CLASS_INVALID_SCHEMA_NAME = "3F"
-CLASS_TRANSACTION_ROLLBACK = "40"
-CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = "42"
-CLASS_WITH_CHECK_OPTION_VIOLATION = "44"
-CLASS_INSUFFICIENT_RESOURCES = "53"
-CLASS_PROGRAM_LIMIT_EXCEEDED = "54"
-CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = "55"
-CLASS_OPERATOR_INTERVENTION = "57"
-CLASS_SYSTEM_ERROR = "58"
-CLASS_SNAPSHOT_FAILURE = "72"
-CLASS_CONFIGURATION_FILE_ERROR = "F0"
-CLASS_FOREIGN_DATA_WRAPPER_ERROR = "HV"
-CLASS_PL_PGSQL_ERROR = "P0"
-CLASS_INTERNAL_ERROR = "XX"
+CLASS_SUCCESSFUL_COMPLETION = '00'
+CLASS_WARNING = '01'
+CLASS_NO_DATA = '02'
+CLASS_SQL_STATEMENT_NOT_YET_COMPLETE = '03'
+CLASS_CONNECTION_EXCEPTION = '08'
+CLASS_TRIGGERED_ACTION_EXCEPTION = '09'
+CLASS_FEATURE_NOT_SUPPORTED = '0A'
+CLASS_INVALID_TRANSACTION_INITIATION = '0B'
+CLASS_LOCATOR_EXCEPTION = '0F'
+CLASS_INVALID_GRANTOR = '0L'
+CLASS_INVALID_ROLE_SPECIFICATION = '0P'
+CLASS_DIAGNOSTICS_EXCEPTION = '0Z'
+CLASS_CASE_NOT_FOUND = '20'
+CLASS_CARDINALITY_VIOLATION = '21'
+CLASS_DATA_EXCEPTION = '22'
+CLASS_INTEGRITY_CONSTRAINT_VIOLATION = '23'
+CLASS_INVALID_CURSOR_STATE = '24'
+CLASS_INVALID_TRANSACTION_STATE = '25'
+CLASS_INVALID_SQL_STATEMENT_NAME = '26'
+CLASS_TRIGGERED_DATA_CHANGE_VIOLATION = '27'
+CLASS_INVALID_AUTHORIZATION_SPECIFICATION = '28'
+CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B'
+CLASS_INVALID_TRANSACTION_TERMINATION = '2D'
+CLASS_SQL_ROUTINE_EXCEPTION = '2F'
+CLASS_INVALID_CURSOR_NAME = '34'
+CLASS_EXTERNAL_ROUTINE_EXCEPTION = '38'
+CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39'
+CLASS_SAVEPOINT_EXCEPTION = '3B'
+CLASS_INVALID_CATALOG_NAME = '3D'
+CLASS_INVALID_SCHEMA_NAME = '3F'
+CLASS_TRANSACTION_ROLLBACK = '40'
+CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42'
+CLASS_WITH_CHECK_OPTION_VIOLATION = '44'
+CLASS_INSUFFICIENT_RESOURCES = '53'
+CLASS_PROGRAM_LIMIT_EXCEEDED = '54'
+CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = '55'
+CLASS_OPERATOR_INTERVENTION = '57'
+CLASS_SYSTEM_ERROR = '58'
+CLASS_SNAPSHOT_FAILURE = '72'
+CLASS_CONFIGURATION_FILE_ERROR = 'F0'
+CLASS_FOREIGN_DATA_WRAPPER_ERROR = 'HV'
+CLASS_PL_PGSQL_ERROR = 'P0'
+CLASS_INTERNAL_ERROR = 'XX'
# Class 00 - Successful Completion
-SUCCESSFUL_COMPLETION = "00000"
+SUCCESSFUL_COMPLETION = '00000'
# Class 01 - Warning
-WARNING = "01000"
-NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = "01003"
-STRING_DATA_RIGHT_TRUNCATION = "01004"
-PRIVILEGE_NOT_REVOKED = "01006"
-PRIVILEGE_NOT_GRANTED = "01007"
-IMPLICIT_ZERO_BIT_PADDING = "01008"
-DYNAMIC_RESULT_SETS_RETURNED = "0100C"
-DEPRECATED_FEATURE = "01P01"
+WARNING = '01000'
+NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = '01003'
+STRING_DATA_RIGHT_TRUNCATION_ = '01004'
+PRIVILEGE_NOT_REVOKED = '01006'
+PRIVILEGE_NOT_GRANTED = '01007'
+IMPLICIT_ZERO_BIT_PADDING = '01008'
+DYNAMIC_RESULT_SETS_RETURNED = '0100C'
+DEPRECATED_FEATURE = '01P01'
# Class 02 - No Data (this is also a warning class per the SQL standard)
-NO_DATA = "02000"
-NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED = "02001"
+NO_DATA = '02000'
+NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED = '02001'
# Class 03 - SQL Statement Not Yet Complete
-SQL_STATEMENT_NOT_YET_COMPLETE = "03000"
+SQL_STATEMENT_NOT_YET_COMPLETE = '03000'
# Class 08 - Connection Exception
-CONNECTION_EXCEPTION = "08000"
-SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION = "08001"
-CONNECTION_DOES_NOT_EXIST = "08003"
-SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION = "08004"
-CONNECTION_FAILURE = "08006"
-TRANSACTION_RESOLUTION_UNKNOWN = "08007"
-PROTOCOL_VIOLATION = "08P01"
+CONNECTION_EXCEPTION = '08000'
+SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION = '08001'
+CONNECTION_DOES_NOT_EXIST = '08003'
+SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION = '08004'
+CONNECTION_FAILURE = '08006'
+TRANSACTION_RESOLUTION_UNKNOWN = '08007'
+PROTOCOL_VIOLATION = '08P01'
# Class 09 - Triggered Action Exception
-TRIGGERED_ACTION_EXCEPTION = "09000"
+TRIGGERED_ACTION_EXCEPTION = '09000'
# Class 0A - Feature Not Supported
-FEATURE_NOT_SUPPORTED = "0A000"
+FEATURE_NOT_SUPPORTED = '0A000'
# Class 0B - Invalid Transaction Initiation
-INVALID_TRANSACTION_INITIATION = "0B000"
+INVALID_TRANSACTION_INITIATION = '0B000'
# Class 0F - Locator Exception
-LOCATOR_EXCEPTION = "0F000"
-INVALID_LOCATOR_SPECIFICATION = "0F001"
+LOCATOR_EXCEPTION = '0F000'
+INVALID_LOCATOR_SPECIFICATION = '0F001'
# Class 0L - Invalid Grantor
-INVALID_GRANTOR = "0L000"
-INVALID_GRANT_OPERATION = "0LP01"
+INVALID_GRANTOR = '0L000'
+INVALID_GRANT_OPERATION = '0LP01'
# Class 0P - Invalid Role Specification
-INVALID_ROLE_SPECIFICATION = "0P000"
+INVALID_ROLE_SPECIFICATION = '0P000'
# Class 0Z - Diagnostics Exception
-DIAGNOSTICS_EXCEPTION = "0Z000"
-STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = "0Z002"
+DIAGNOSTICS_EXCEPTION = '0Z000'
+STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = '0Z002'
# Class 20 - Case Not Found
-CASE_NOT_FOUND = "20000"
+CASE_NOT_FOUND = '20000'
# Class 21 - Cardinality Violation
-CARDINALITY_VIOLATION = "21000"
+CARDINALITY_VIOLATION = '21000'
# Class 22 - Data Exception
-DATA_EXCEPTION = "22000"
-STRING_DATA_RIGHT_TRUNCATION = "22001"
-NULL_VALUE_NO_INDICATOR_PARAMETER = "22002"
-NUMERIC_VALUE_OUT_OF_RANGE = "22003"
-NULL_VALUE_NOT_ALLOWED = "22004"
-ERROR_IN_ASSIGNMENT = "22005"
-INVALID_DATETIME_FORMAT = "22007"
-DATETIME_FIELD_OVERFLOW = "22008"
-INVALID_TIME_ZONE_DISPLACEMENT_VALUE = "22009"
-ESCAPE_CHARACTER_CONFLICT = "2200B"
-INVALID_USE_OF_ESCAPE_CHARACTER = "2200C"
-INVALID_ESCAPE_OCTET = "2200D"
-ZERO_LENGTH_CHARACTER_STRING = "2200F"
-MOST_SPECIFIC_TYPE_MISMATCH = "2200G"
-SEQUENCE_GENERATOR_LIMIT_EXCEEDED = "2200H"
-NOT_AN_XML_DOCUMENT = "2200L"
-INVALID_XML_DOCUMENT = "2200M"
-INVALID_XML_CONTENT = "2200N"
-INVALID_XML_COMMENT = "2200S"
-INVALID_XML_PROCESSING_INSTRUCTION = "2200T"
-INVALID_INDICATOR_PARAMETER_VALUE = "22010"
-SUBSTRING_ERROR = "22011"
-DIVISION_BY_ZERO = "22012"
-INVALID_PRECEDING_OR_FOLLOWING_SIZE = "22013"
-INVALID_ARGUMENT_FOR_NTILE_FUNCTION = "22014"
-INTERVAL_FIELD_OVERFLOW = "22015"
-INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION = "22016"
-INVALID_CHARACTER_VALUE_FOR_CAST = "22018"
-INVALID_ESCAPE_CHARACTER = "22019"
-INVALID_REGULAR_EXPRESSION = "2201B"
-INVALID_ARGUMENT_FOR_LOGARITHM = "2201E"
-INVALID_ARGUMENT_FOR_POWER_FUNCTION = "2201F"
-INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION = "2201G"
-INVALID_ROW_COUNT_IN_LIMIT_CLAUSE = "2201W"
-INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE = "2201X"
-INVALID_LIMIT_VALUE = "22020"
-CHARACTER_NOT_IN_REPERTOIRE = "22021"
-INDICATOR_OVERFLOW = "22022"
-INVALID_PARAMETER_VALUE = "22023"
-UNTERMINATED_C_STRING = "22024"
-INVALID_ESCAPE_SEQUENCE = "22025"
-STRING_DATA_LENGTH_MISMATCH = "22026"
-TRIM_ERROR = "22027"
-ARRAY_SUBSCRIPT_ERROR = "2202E"
-INVALID_TABLESAMPLE_REPEAT = "2202G"
-INVALID_TABLESAMPLE_ARGUMENT = "2202H"
-DUPLICATE_JSON_OBJECT_KEY_VALUE = "22030"
-INVALID_JSON_TEXT = "22032"
-INVALID_SQL_JSON_SUBSCRIPT = "22033"
-MORE_THAN_ONE_SQL_JSON_ITEM = "22034"
-NO_SQL_JSON_ITEM = "22035"
-NON_NUMERIC_SQL_JSON_ITEM = "22036"
-NON_UNIQUE_KEYS_IN_A_JSON_OBJECT = "22037"
-SINGLETON_SQL_JSON_ITEM_REQUIRED = "22038"
-SQL_JSON_ARRAY_NOT_FOUND = "22039"
-SQL_JSON_MEMBER_NOT_FOUND = "2203A"
-SQL_JSON_NUMBER_NOT_FOUND = "2203B"
-SQL_JSON_OBJECT_NOT_FOUND = "2203C"
-TOO_MANY_JSON_ARRAY_ELEMENTS = "2203D"
-TOO_MANY_JSON_OBJECT_MEMBERS = "2203E"
-SQL_JSON_SCALAR_REQUIRED = "2203F"
-FLOATING_POINT_EXCEPTION = "22P01"
-INVALID_TEXT_REPRESENTATION = "22P02"
-INVALID_BINARY_REPRESENTATION = "22P03"
-BAD_COPY_FILE_FORMAT = "22P04"
-UNTRANSLATABLE_CHARACTER = "22P05"
-NONSTANDARD_USE_OF_ESCAPE_CHARACTER = "22P06"
+DATA_EXCEPTION = '22000'
+STRING_DATA_RIGHT_TRUNCATION = '22001'
+NULL_VALUE_NO_INDICATOR_PARAMETER = '22002'
+NUMERIC_VALUE_OUT_OF_RANGE = '22003'
+NULL_VALUE_NOT_ALLOWED_ = '22004'
+ERROR_IN_ASSIGNMENT = '22005'
+INVALID_DATETIME_FORMAT = '22007'
+DATETIME_FIELD_OVERFLOW = '22008'
+INVALID_TIME_ZONE_DISPLACEMENT_VALUE = '22009'
+ESCAPE_CHARACTER_CONFLICT = '2200B'
+INVALID_USE_OF_ESCAPE_CHARACTER = '2200C'
+INVALID_ESCAPE_OCTET = '2200D'
+ZERO_LENGTH_CHARACTER_STRING = '2200F'
+MOST_SPECIFIC_TYPE_MISMATCH = '2200G'
+SEQUENCE_GENERATOR_LIMIT_EXCEEDED = '2200H'
+NOT_AN_XML_DOCUMENT = '2200L'
+INVALID_XML_DOCUMENT = '2200M'
+INVALID_XML_CONTENT = '2200N'
+INVALID_XML_COMMENT = '2200S'
+INVALID_XML_PROCESSING_INSTRUCTION = '2200T'
+INVALID_INDICATOR_PARAMETER_VALUE = '22010'
+SUBSTRING_ERROR = '22011'
+DIVISION_BY_ZERO = '22012'
+INVALID_PRECEDING_OR_FOLLOWING_SIZE = '22013'
+INVALID_ARGUMENT_FOR_NTILE_FUNCTION = '22014'
+INTERVAL_FIELD_OVERFLOW = '22015'
+INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION = '22016'
+INVALID_CHARACTER_VALUE_FOR_CAST = '22018'
+INVALID_ESCAPE_CHARACTER = '22019'
+INVALID_REGULAR_EXPRESSION = '2201B'
+INVALID_ARGUMENT_FOR_LOGARITHM = '2201E'
+INVALID_ARGUMENT_FOR_POWER_FUNCTION = '2201F'
+INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION = '2201G'
+INVALID_ROW_COUNT_IN_LIMIT_CLAUSE = '2201W'
+INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE = '2201X'
+INVALID_LIMIT_VALUE = '22020'
+CHARACTER_NOT_IN_REPERTOIRE = '22021'
+INDICATOR_OVERFLOW = '22022'
+INVALID_PARAMETER_VALUE = '22023'
+UNTERMINATED_C_STRING = '22024'
+INVALID_ESCAPE_SEQUENCE = '22025'
+STRING_DATA_LENGTH_MISMATCH = '22026'
+TRIM_ERROR = '22027'
+ARRAY_SUBSCRIPT_ERROR = '2202E'
+INVALID_TABLESAMPLE_REPEAT = '2202G'
+INVALID_TABLESAMPLE_ARGUMENT = '2202H'
+DUPLICATE_JSON_OBJECT_KEY_VALUE = '22030'
+INVALID_ARGUMENT_FOR_SQL_JSON_DATETIME_FUNCTION = '22031'
+INVALID_JSON_TEXT = '22032'
+INVALID_SQL_JSON_SUBSCRIPT = '22033'
+MORE_THAN_ONE_SQL_JSON_ITEM = '22034'
+NO_SQL_JSON_ITEM = '22035'
+NON_NUMERIC_SQL_JSON_ITEM = '22036'
+NON_UNIQUE_KEYS_IN_A_JSON_OBJECT = '22037'
+SINGLETON_SQL_JSON_ITEM_REQUIRED = '22038'
+SQL_JSON_ARRAY_NOT_FOUND = '22039'
+SQL_JSON_MEMBER_NOT_FOUND = '2203A'
+SQL_JSON_NUMBER_NOT_FOUND = '2203B'
+SQL_JSON_OBJECT_NOT_FOUND = '2203C'
+TOO_MANY_JSON_ARRAY_ELEMENTS = '2203D'
+TOO_MANY_JSON_OBJECT_MEMBERS = '2203E'
+SQL_JSON_SCALAR_REQUIRED = '2203F'
+SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE = '2203G'
+FLOATING_POINT_EXCEPTION = '22P01'
+INVALID_TEXT_REPRESENTATION = '22P02'
+INVALID_BINARY_REPRESENTATION = '22P03'
+BAD_COPY_FILE_FORMAT = '22P04'
+UNTRANSLATABLE_CHARACTER = '22P05'
+NONSTANDARD_USE_OF_ESCAPE_CHARACTER = '22P06'
# Class 23 - Integrity Constraint Violation
-INTEGRITY_CONSTRAINT_VIOLATION = "23000"
-RESTRICT_VIOLATION = "23001"
-NOT_NULL_VIOLATION = "23502"
-FOREIGN_KEY_VIOLATION = "23503"
-UNIQUE_VIOLATION = "23505"
-CHECK_VIOLATION = "23514"
-EXCLUSION_VIOLATION = "23P01"
+INTEGRITY_CONSTRAINT_VIOLATION = '23000'
+RESTRICT_VIOLATION = '23001'
+NOT_NULL_VIOLATION = '23502'
+FOREIGN_KEY_VIOLATION = '23503'
+UNIQUE_VIOLATION = '23505'
+CHECK_VIOLATION = '23514'
+EXCLUSION_VIOLATION = '23P01'
# Class 24 - Invalid Cursor State
-INVALID_CURSOR_STATE = "24000"
+INVALID_CURSOR_STATE = '24000'
# Class 25 - Invalid Transaction State
-INVALID_TRANSACTION_STATE = "25000"
-ACTIVE_SQL_TRANSACTION = "25001"
-BRANCH_TRANSACTION_ALREADY_ACTIVE = "25002"
-INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION = "25003"
-INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION = "25004"
-NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION = "25005"
-READ_ONLY_SQL_TRANSACTION = "25006"
-SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED = "25007"
-HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = "25008"
-NO_ACTIVE_SQL_TRANSACTION = "25P01"
-IN_FAILED_SQL_TRANSACTION = "25P02"
-IDLE_IN_TRANSACTION_SESSION_TIMEOUT = "25P03"
+INVALID_TRANSACTION_STATE = '25000'
+ACTIVE_SQL_TRANSACTION = '25001'
+BRANCH_TRANSACTION_ALREADY_ACTIVE = '25002'
+INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION = '25003'
+INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION = '25004'
+NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION = '25005'
+READ_ONLY_SQL_TRANSACTION = '25006'
+SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED = '25007'
+HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008'
+NO_ACTIVE_SQL_TRANSACTION = '25P01'
+IN_FAILED_SQL_TRANSACTION = '25P02'
+IDLE_IN_TRANSACTION_SESSION_TIMEOUT = '25P03'
# Class 26 - Invalid SQL Statement Name
-INVALID_SQL_STATEMENT_NAME = "26000"
+INVALID_SQL_STATEMENT_NAME = '26000'
# Class 27 - Triggered Data Change Violation
-TRIGGERED_DATA_CHANGE_VIOLATION = "27000"
+TRIGGERED_DATA_CHANGE_VIOLATION = '27000'
# Class 28 - Invalid Authorization Specification
-INVALID_AUTHORIZATION_SPECIFICATION = "28000"
-INVALID_PASS_WORD = "28P01"
+INVALID_AUTHORIZATION_SPECIFICATION = '28000'
+INVALID_PASSWORD = '28P01'
# Class 2B - Dependent Privilege Descriptors Still Exist
-DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = "2B000"
-DEPENDENT_OBJECTS_STILL_EXIST = "2BP01"
+DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B000'
+DEPENDENT_OBJECTS_STILL_EXIST = '2BP01'
# Class 2D - Invalid Transaction Termination
-INVALID_TRANSACTION_TERMINATION = "2D000"
+INVALID_TRANSACTION_TERMINATION = '2D000'
# Class 2F - SQL Routine Exception
-SQL_ROUTINE_EXCEPTION = "2F000"
-MODIFYING_SQL_DATA_NOT_PERMITTED = "2F002"
-PROHIBITED_SQL_STATEMENT_ATTEMPTED = "2F003"
-READING_SQL_DATA_NOT_PERMITTED = "2F004"
-FUNCTION_EXECUTED_NO_RETURN_STATEMENT = "2F005"
+SQL_ROUTINE_EXCEPTION = '2F000'
+MODIFYING_SQL_DATA_NOT_PERMITTED_ = '2F002'
+PROHIBITED_SQL_STATEMENT_ATTEMPTED_ = '2F003'
+READING_SQL_DATA_NOT_PERMITTED_ = '2F004'
+FUNCTION_EXECUTED_NO_RETURN_STATEMENT = '2F005'
# Class 34 - Invalid Cursor Name
-INVALID_CURSOR_NAME = "34000"
+INVALID_CURSOR_NAME = '34000'
# Class 38 - External Routine Exception
-EXTERNAL_ROUTINE_EXCEPTION = "38000"
-CONTAINING_SQL_NOT_PERMITTED = "38001"
-MODIFYING_SQL_DATA_NOT_PERMITTED = "38002"
-PROHIBITED_SQL_STATEMENT_ATTEMPTED = "38003"
-READING_SQL_DATA_NOT_PERMITTED = "38004"
+EXTERNAL_ROUTINE_EXCEPTION = '38000'
+CONTAINING_SQL_NOT_PERMITTED = '38001'
+MODIFYING_SQL_DATA_NOT_PERMITTED = '38002'
+PROHIBITED_SQL_STATEMENT_ATTEMPTED = '38003'
+READING_SQL_DATA_NOT_PERMITTED = '38004'
# Class 39 - External Routine Invocation Exception
-EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = "39000"
-INVALID_SQLSTATE_RETURNED = "39001"
-NULL_VALUE_NOT_ALLOWED = "39004"
-TRIGGER_PROTOCOL_VIOLATED = "39P01"
-SRF_PROTOCOL_VIOLATED = "39P02"
-EVENT_TRIGGER_PROTOCOL_VIOLATED = "39P03"
+EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39000'
+INVALID_SQLSTATE_RETURNED = '39001'
+NULL_VALUE_NOT_ALLOWED = '39004'
+TRIGGER_PROTOCOL_VIOLATED = '39P01'
+SRF_PROTOCOL_VIOLATED = '39P02'
+EVENT_TRIGGER_PROTOCOL_VIOLATED = '39P03'
# Class 3B - Savepoint Exception
-SAVEPOINT_EXCEPTION = "3B000"
-INVALID_SAVEPOINT_SPECIFICATION = "3B001"
+SAVEPOINT_EXCEPTION = '3B000'
+INVALID_SAVEPOINT_SPECIFICATION = '3B001'
# Class 3D - Invalid Catalog Name
-INVALID_CATALOG_NAME = "3D000"
+INVALID_CATALOG_NAME = '3D000'
# Class 3F - Invalid Schema Name
-INVALID_SCHEMA_NAME = "3F000"
+INVALID_SCHEMA_NAME = '3F000'
# Class 40 - Transaction Rollback
-TRANSACTION_ROLLBACK = "40000"
-SERIALIZATION_FAILURE = "40001"
-TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION = "40002"
-STATEMENT_COMPLETION_UNKNOWN = "40003"
-DEADLOCK_DETECTED = "40P01"
+TRANSACTION_ROLLBACK = '40000'
+SERIALIZATION_FAILURE = '40001'
+TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION = '40002'
+STATEMENT_COMPLETION_UNKNOWN = '40003'
+DEADLOCK_DETECTED = '40P01'
# Class 42 - Syntax Error or Access Rule Violation
-SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = "42000"
-INSUFFICIENT_PRIVILEGE = "42501"
-SYNTAX_ERROR = "42601"
-INVALID_NAME = "42602"
-INVALID_COLUMN_DEFINITION = "42611"
-NAME_TOO_LONG = "42622"
-DUPLICATE_COLUMN = "42701"
-AMBIGUOUS_COLUMN = "42702"
-UNDEFINED_COLUMN = "42703"
-UNDEFINED_OBJECT = "42704"
-DUPLICATE_OBJECT = "42710"
-DUPLICATE_ALIAS = "42712"
-DUPLICATE_FUNCTION = "42723"
-AMBIGUOUS_FUNCTION = "42725"
-GROUPING_ERROR = "42803"
-DATATYPE_MISMATCH = "42804"
-WRONG_OBJECT_TYPE = "42809"
-INVALID_FOREIGN_KEY = "42830"
-CANNOT_COERCE = "42846"
-UNDEFINED_FUNCTION = "42883"
-GENERATED_ALWAYS = "428C9"
-RESERVED_NAME = "42939"
-UNDEFINED_TABLE = "42P01"
-UNDEFINED_PARAMETER = "42P02"
-DUPLICATE_CURSOR = "42P03"
-DUPLICATE_DATABASE = "42P04"
-DUPLICATE_PREPARED_STATEMENT = "42P05"
-DUPLICATE_SCHEMA = "42P06"
-DUPLICATE_TABLE = "42P07"
-AMBIGUOUS_PARAMETER = "42P08"
-AMBIGUOUS_ALIAS = "42P09"
-INVALID_COLUMN_REFERENCE = "42P10"
-INVALID_CURSOR_DEFINITION = "42P11"
-INVALID_DATABASE_DEFINITION = "42P12"
-INVALID_FUNCTION_DEFINITION = "42P13"
-INVALID_PREPARED_STATEMENT_DEFINITION = "42P14"
-INVALID_SCHEMA_DEFINITION = "42P15"
-INVALID_TABLE_DEFINITION = "42P16"
-INVALID_OBJECT_DEFINITION = "42P17"
-INDETERMINATE_DATATYPE = "42P18"
-INVALID_RECURSION = "42P19"
-WINDOWING_ERROR = "42P20"
-COLLATION_MISMATCH = "42P21"
-INDETERMINATE_COLLATION = "42P22"
+SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42000'
+INSUFFICIENT_PRIVILEGE = '42501'
+SYNTAX_ERROR = '42601'
+INVALID_NAME = '42602'
+INVALID_COLUMN_DEFINITION = '42611'
+NAME_TOO_LONG = '42622'
+DUPLICATE_COLUMN = '42701'
+AMBIGUOUS_COLUMN = '42702'
+UNDEFINED_COLUMN = '42703'
+UNDEFINED_OBJECT = '42704'
+DUPLICATE_OBJECT = '42710'
+DUPLICATE_ALIAS = '42712'
+DUPLICATE_FUNCTION = '42723'
+AMBIGUOUS_FUNCTION = '42725'
+GROUPING_ERROR = '42803'
+DATATYPE_MISMATCH = '42804'
+WRONG_OBJECT_TYPE = '42809'
+INVALID_FOREIGN_KEY = '42830'
+CANNOT_COERCE = '42846'
+UNDEFINED_FUNCTION = '42883'
+GENERATED_ALWAYS = '428C9'
+RESERVED_NAME = '42939'
+UNDEFINED_TABLE = '42P01'
+UNDEFINED_PARAMETER = '42P02'
+DUPLICATE_CURSOR = '42P03'
+DUPLICATE_DATABASE = '42P04'
+DUPLICATE_PREPARED_STATEMENT = '42P05'
+DUPLICATE_SCHEMA = '42P06'
+DUPLICATE_TABLE = '42P07'
+AMBIGUOUS_PARAMETER = '42P08'
+AMBIGUOUS_ALIAS = '42P09'
+INVALID_COLUMN_REFERENCE = '42P10'
+INVALID_CURSOR_DEFINITION = '42P11'
+INVALID_DATABASE_DEFINITION = '42P12'
+INVALID_FUNCTION_DEFINITION = '42P13'
+INVALID_PREPARED_STATEMENT_DEFINITION = '42P14'
+INVALID_SCHEMA_DEFINITION = '42P15'
+INVALID_TABLE_DEFINITION = '42P16'
+INVALID_OBJECT_DEFINITION = '42P17'
+INDETERMINATE_DATATYPE = '42P18'
+INVALID_RECURSION = '42P19'
+WINDOWING_ERROR = '42P20'
+COLLATION_MISMATCH = '42P21'
+INDETERMINATE_COLLATION = '42P22'
# Class 44 - WITH CHECK OPTION Violation
-WITH_CHECK_OPTION_VIOLATION = "44000"
+WITH_CHECK_OPTION_VIOLATION = '44000'
# Class 53 - Insufficient Resources
-INSUFFICIENT_RESOURCES = "53000"
-DISK_FULL = "53100"
-OUT_OF_MEMORY = "53200"
-TOO_MANY_CONNECTIONS = "53300"
-CONFIGURATION_LIMIT_EXCEEDED = "53400"
+INSUFFICIENT_RESOURCES = '53000'
+DISK_FULL = '53100'
+OUT_OF_MEMORY = '53200'
+TOO_MANY_CONNECTIONS = '53300'
+CONFIGURATION_LIMIT_EXCEEDED = '53400'
# Class 54 - Program Limit Exceeded
-PROGRAM_LIMIT_EXCEEDED = "54000"
-STATEMENT_TOO_COMPLEX = "54001"
-TOO_MANY_COLUMNS = "54011"
-TOO_MANY_ARGUMENTS = "54023"
+PROGRAM_LIMIT_EXCEEDED = '54000'
+STATEMENT_TOO_COMPLEX = '54001'
+TOO_MANY_COLUMNS = '54011'
+TOO_MANY_ARGUMENTS = '54023'
# Class 55 - Object Not In Prerequisite State
-OBJECT_NOT_IN_PREREQUISITE_STATE = "55000"
-OBJECT_IN_USE = "55006"
-CANT_CHANGE_RUNTIME_PARAM = "55P02"
-LOCK_NOT_AVAILABLE = "55P03"
-UNSAFE_NEW_ENUM_VALUE_USAGE = "55P04"
+OBJECT_NOT_IN_PREREQUISITE_STATE = '55000'
+OBJECT_IN_USE = '55006'
+CANT_CHANGE_RUNTIME_PARAM = '55P02'
+LOCK_NOT_AVAILABLE = '55P03'
+UNSAFE_NEW_ENUM_VALUE_USAGE = '55P04'
# Class 57 - Operator Intervention
-OPERATOR_INTERVENTION = "57000"
-QUERY_CANCELED = "57014"
-ADMIN_SHUTDOWN = "57P01"
-CRASH_SHUTDOWN = "57P02"
-CANNOT_CONNECT_NOW = "57P03"
-DATABASE_DROPPED = "57P04"
+OPERATOR_INTERVENTION = '57000'
+QUERY_CANCELED = '57014'
+ADMIN_SHUTDOWN = '57P01'
+CRASH_SHUTDOWN = '57P02'
+CANNOT_CONNECT_NOW = '57P03'
+DATABASE_DROPPED = '57P04'
+IDLE_SESSION_TIMEOUT = '57P05'
# Class 58 - System Error (errors external to PostgreSQL itself)
-SYSTEM_ERROR = "58000"
-IO_ERROR = "58030"
-UNDEFINED_FILE = "58P01"
-DUPLICATE_FILE = "58P02"
+SYSTEM_ERROR = '58000'
+IO_ERROR = '58030'
+UNDEFINED_FILE = '58P01'
+DUPLICATE_FILE = '58P02'
# Class 72 - Snapshot Failure
-SNAPSHOT_TOO_OLD = "72000"
+SNAPSHOT_TOO_OLD = '72000'
# Class F0 - Configuration File Error
-CONFIG_FILE_ERROR = "F0000"
-LOCK_FILE_EXISTS = "F0001"
+CONFIG_FILE_ERROR = 'F0000'
+LOCK_FILE_EXISTS = 'F0001'
# Class HV - Foreign Data Wrapper Error (SQL/MED)
-FDW_ERROR = "HV000"
-FDW_OUT_OF_MEMORY = "HV001"
-FDW_DYNAMIC_PARAMETER_VALUE_NEEDED = "HV002"
-FDW_INVALID_DATA_TYPE = "HV004"
-FDW_COLUMN_NAME_NOT_FOUND = "HV005"
-FDW_INVALID_DATA_TYPE_DESCRIPTORS = "HV006"
-FDW_INVALID_COLUMN_NAME = "HV007"
-FDW_INVALID_COLUMN_NUMBER = "HV008"
-FDW_INVALID_USE_OF_NULL_POINTER = "HV009"
-FDW_INVALID_STRING_FORMAT = "HV00A"
-FDW_INVALID_HANDLE = "HV00B"
-FDW_INVALID_OPTION_INDEX = "HV00C"
-FDW_INVALID_OPTION_NAME = "HV00D"
-FDW_OPTION_NAME_NOT_FOUND = "HV00J"
-FDW_REPLY_HANDLE = "HV00K"
-FDW_UNABLE_TO_CREATE_EXECUTION = "HV00L"
-FDW_UNABLE_TO_CREATE_REPLY = "HV00M"
-FDW_UNABLE_TO_ESTABLISH_CONNECTION = "HV00N"
-FDW_NO_SCHEMAS = "HV00P"
-FDW_SCHEMA_NOT_FOUND = "HV00Q"
-FDW_TABLE_NOT_FOUND = "HV00R"
-FDW_FUNCTION_SEQUENCE_ERROR = "HV010"
-FDW_TOO_MANY_HANDLES = "HV014"
-FDW_INCONSISTENT_DESCRIPTOR_INFORMATION = "HV021"
-FDW_INVALID_ATTRIBUTE_VALUE = "HV024"
-FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH = "HV090"
-FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER = "HV091"
+FDW_ERROR = 'HV000'
+FDW_OUT_OF_MEMORY = 'HV001'
+FDW_DYNAMIC_PARAMETER_VALUE_NEEDED = 'HV002'
+FDW_INVALID_DATA_TYPE = 'HV004'
+FDW_COLUMN_NAME_NOT_FOUND = 'HV005'
+FDW_INVALID_DATA_TYPE_DESCRIPTORS = 'HV006'
+FDW_INVALID_COLUMN_NAME = 'HV007'
+FDW_INVALID_COLUMN_NUMBER = 'HV008'
+FDW_INVALID_USE_OF_NULL_POINTER = 'HV009'
+FDW_INVALID_STRING_FORMAT = 'HV00A'
+FDW_INVALID_HANDLE = 'HV00B'
+FDW_INVALID_OPTION_INDEX = 'HV00C'
+FDW_INVALID_OPTION_NAME = 'HV00D'
+FDW_OPTION_NAME_NOT_FOUND = 'HV00J'
+FDW_REPLY_HANDLE = 'HV00K'
+FDW_UNABLE_TO_CREATE_EXECUTION = 'HV00L'
+FDW_UNABLE_TO_CREATE_REPLY = 'HV00M'
+FDW_UNABLE_TO_ESTABLISH_CONNECTION = 'HV00N'
+FDW_NO_SCHEMAS = 'HV00P'
+FDW_SCHEMA_NOT_FOUND = 'HV00Q'
+FDW_TABLE_NOT_FOUND = 'HV00R'
+FDW_FUNCTION_SEQUENCE_ERROR = 'HV010'
+FDW_TOO_MANY_HANDLES = 'HV014'
+FDW_INCONSISTENT_DESCRIPTOR_INFORMATION = 'HV021'
+FDW_INVALID_ATTRIBUTE_VALUE = 'HV024'
+FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH = 'HV090'
+FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER = 'HV091'
# Class P0 - PL/pgSQL Error
-PLPGSQL_ERROR = "P0000"
-RAISE_EXCEPTION = "P0001"
-NO_DATA_FOUND = "P0002"
-TOO_MANY_ROWS = "P0003"
-ASSERT_FAILURE = "P0004"
+PLPGSQL_ERROR = 'P0000'
+RAISE_EXCEPTION = 'P0001'
+NO_DATA_FOUND = 'P0002'
+TOO_MANY_ROWS = 'P0003'
+ASSERT_FAILURE = 'P0004'
# Class XX - Internal Error
-INTERNAL_ERROR = "XX000"
-DATA_CORRUPTED = "XX001"
-INDEX_CORRUPTED = "XX002"
+INTERNAL_ERROR = 'XX000'
+DATA_CORRUPTED = 'XX001'
+INDEX_CORRUPTED = 'XX002'
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errors.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errors.py
index 98983fff..e4e47f5b 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errors.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/errors.py
@@ -4,7 +4,7 @@
# psycopg/errors.py - SQLSTATE and DB-API exceptions
#
# Copyright (C) 2018-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extensions.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extensions.py
index cdea76de..b938d0ce 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extensions.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extensions.py
@@ -6,14 +6,14 @@
- `cursor` -- the new-type inheritable cursor class
- `lobject` -- the new-type inheritable large object class
- `adapt()` -- exposes the PEP-246_ compatible adapting mechanism used
-by psycopg to adapt Python types to PostgreSQL ones
+ by psycopg to adapt Python types to PostgreSQL ones
.. _PEP-246: https://www.python.org/dev/peps/pep-0246/
"""
# psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -42,14 +42,6 @@
ROWIDARRAY, STRINGARRAY, TIME, TIMEARRAY, UNICODE, UNICODEARRAY,
AsIs, Binary, Boolean, Float, Int, QuotedString, )
-try:
- from psycopg2._psycopg import ( # noqa
- MXDATE, MXDATETIME, MXDATETIMETZ, MXINTERVAL, MXTIME, MXDATEARRAY,
- MXDATETIMEARRAY, MXDATETIMETZARRAY, MXINTERVALARRAY, MXTIMEARRAY,
- DateFromMx, TimeFromMx, TimestampFromMx, IntervalFromMx, )
-except ImportError:
- pass
-
from psycopg2._psycopg import ( # noqa
PYDATE, PYDATETIME, PYDATETIMETZ, PYINTERVAL, PYTIME, PYDATEARRAY,
PYDATETIMEARRAY, PYDATETIMETZARRAY, PYINTERVALARRAY, PYTIMEARRAY,
@@ -106,7 +98,7 @@ def register_adapter(typ, callable):
# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
-class SQL_IN(object):
+class SQL_IN:
"""Adapt any iterable to an SQL quotable object."""
def __init__(self, seq):
self._seq = seq
@@ -130,7 +122,7 @@ def __str__(self):
return str(self.getquoted())
-class NoneAdapter(object):
+class NoneAdapter:
"""Adapt None to NULL.
This adapter is not used normally as a fast path in mogrify uses NULL,
@@ -168,7 +160,7 @@ def make_dsn(dsn=None, **kwargs):
tmp.update(kwargs)
kwargs = tmp
- dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
+ dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
for (k, v) in kwargs.items()])
# verify that the returned dsn is valid
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extras.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extras.py
index a24f7496..36e8ef9a 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extras.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/extras.py
@@ -6,7 +6,7 @@
# psycopg/extras.py - miscellaneous extra goodies for psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -38,7 +38,7 @@
from .extensions import cursor as _cursor
from .extensions import connection as _connection
from .extensions import adapt as _A, quote_ident
-from .compat import PY2, PY3, lru_cache
+from functools import lru_cache
from psycopg2._psycopg import ( # noqa
REPLICATION_PHYSICAL, REPLICATION_LOGICAL,
@@ -72,47 +72,47 @@ def __init__(self, *args, **kwargs):
else:
raise NotImplementedError(
"DictCursorBase can't be instantiated without a row factory.")
- super(DictCursorBase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._query_executed = False
self._prefetch = False
self.row_factory = row_factory
def fetchone(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
return res
def fetchmany(self, size=None):
if self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
return res
def fetchall(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
return res
def __iter__(self):
try:
if self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
yield first
@@ -126,26 +126,29 @@ class DictConnection(_connection):
"""A connection that uses `DictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
- return super(DictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class DictCursor(DictCursorBase):
- """A cursor that keeps a list of column name -> index mappings."""
+ """A cursor that keeps a list of column name -> index mappings__.
+
+ .. __: https://docs.python.org/glossary.html#term-mapping
+ """
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = DictRow
- super(DictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._prefetch = True
def execute(self, query, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -166,22 +169,22 @@ def __init__(self, cursor):
def __getitem__(self, x):
if not isinstance(x, (int, slice)):
x = self._index[x]
- return super(DictRow, self).__getitem__(x)
+ return super().__getitem__(x)
def __setitem__(self, x, v):
if not isinstance(x, (int, slice)):
x = self._index[x]
- super(DictRow, self).__setitem__(x, v)
+ super().__setitem__(x, v)
def items(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return ((n, g(self._index[n])) for n in self._index)
def keys(self):
return iter(self._index)
def values(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return (g(self._index[n]) for n in self._index)
def get(self, x, default=None):
@@ -198,7 +201,7 @@ def __contains__(self, x):
def __reduce__(self):
# this is apparently useless, but it fixes #1073
- return super(DictRow, self).__reduce__()
+ return super().__reduce__()
def __getstate__(self):
return self[:], self._index.copy()
@@ -207,27 +210,12 @@ def __setstate__(self, data):
self[:] = data[0]
self._index = data[1]
- if PY2:
- iterkeys = keys
- itervalues = values
- iteritems = items
- has_key = __contains__
-
- def keys(self):
- return list(self.iterkeys())
-
- def values(self):
- return tuple(self.itervalues())
-
- def items(self):
- return list(self.iteritems())
-
class RealDictConnection(_connection):
"""A connection that uses `RealDictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
- return super(RealDictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class RealDictCursor(DictCursorBase):
@@ -240,17 +228,17 @@ class RealDictCursor(DictCursorBase):
"""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = RealDictRow
- super(RealDictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def execute(self, query, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -268,7 +256,7 @@ def __init__(self, *args, **kwargs):
else:
cursor = None
- super(RealDictRow, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if cursor is not None:
# Required for named cursors
@@ -284,20 +272,20 @@ def __setitem__(self, key, value):
if RealDictRow in self:
# We are in the row building phase
mapping = self[RealDictRow]
- super(RealDictRow, self).__setitem__(mapping[key], value)
+ super().__setitem__(mapping[key], value)
if key == len(mapping) - 1:
# Row building finished
del self[RealDictRow]
return
- super(RealDictRow, self).__setitem__(key, value)
+ super().__setitem__(key, value)
class NamedTupleConnection(_connection):
"""A connection that uses `NamedTupleCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
- return super(NamedTupleConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class NamedTupleCursor(_cursor):
@@ -321,18 +309,18 @@ class NamedTupleCursor(_cursor):
def execute(self, query, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def executemany(self, query, vars):
self.Record = None
- return super(NamedTupleCursor, self).executemany(query, vars)
+ return super().executemany(query, vars)
def callproc(self, procname, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def fetchone(self):
- t = super(NamedTupleCursor, self).fetchone()
+ t = super().fetchone()
if t is not None:
nt = self.Record
if nt is None:
@@ -340,14 +328,14 @@ def fetchone(self):
return nt._make(t)
def fetchmany(self, size=None):
- ts = super(NamedTupleCursor, self).fetchmany(size)
+ ts = super().fetchmany(size)
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return list(map(nt._make, ts))
def fetchall(self):
- ts = super(NamedTupleCursor, self).fetchall()
+ ts = super().fetchall()
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
@@ -355,7 +343,7 @@ def fetchall(self):
def __iter__(self):
try:
- it = super(NamedTupleCursor, self).__iter__()
+ it = super().__iter__()
t = next(it)
nt = self.Record
@@ -369,10 +357,6 @@ def __iter__(self):
except StopIteration:
return
- # ascii except alnum and underscore
- _re_clean = _re.compile(
- '[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
-
def _make_nt(self):
key = tuple(d[0] for d in self.description) if self.description else ()
return self._cached_make_nt(key)
@@ -381,7 +365,7 @@ def _make_nt(self):
def _do_make_nt(cls, key):
fields = []
for s in key:
- s = cls._re_clean.sub('_', s)
+ s = _re_clean.sub('_', s)
# Python identifier cannot start with numbers, namedtuple fields
# cannot start with underscore. So...
if s[0] == '_' or '0' <= s[0] <= '9':
@@ -433,7 +417,7 @@ def filter(self, msg, curs):
def _logtofile(self, msg, curs):
msg = self.filter(msg, curs)
if msg:
- if PY3 and isinstance(msg, bytes):
+ if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
self._logobj.write(msg + _os.linesep)
@@ -450,7 +434,7 @@ def _check(self):
def cursor(self, *args, **kwargs):
self._check()
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
- return super(LoggingConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class LoggingCursor(_cursor):
@@ -458,13 +442,13 @@ class LoggingCursor(_cursor):
def execute(self, query, vars=None):
try:
- return super(LoggingCursor, self).execute(query, vars)
+ return super().execute(query, vars)
finally:
self.connection.log(self.query, self)
def callproc(self, procname, vars=None):
try:
- return super(LoggingCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
finally:
self.connection.log(self.query, self)
@@ -487,9 +471,9 @@ def initialize(self, logobj, mintime=0):
def filter(self, msg, curs):
t = (_time.time() - curs.timestamp) * 1000
if t > self._mintime:
- if PY3 and isinstance(msg, bytes):
+ if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
- return msg + _os.linesep + " (execution time: %d ms)" % t
+ return f"{msg}{_os.linesep} (execution time: {t} ms)"
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory',
@@ -513,14 +497,14 @@ class LogicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_LOGICAL
- super(LogicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class PhysicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_PHYSICAL
- super(PhysicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class StopReplication(Exception):
@@ -541,7 +525,7 @@ class ReplicationCursor(_replicationCursor):
def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None):
"""Create streaming replication slot."""
- command = "CREATE_REPLICATION_SLOT %s " % quote_ident(slot_name, self)
+ command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} "
if slot_type is None:
slot_type = self.connection.replication_type
@@ -552,7 +536,7 @@ def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None)
"output plugin name is required to create "
"logical replication slot")
- command += "LOGICAL %s" % quote_ident(output_plugin, self)
+ command += f"LOGICAL {quote_ident(output_plugin, self)}"
elif slot_type == REPLICATION_PHYSICAL:
if output_plugin is not None:
@@ -564,14 +548,14 @@ def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None)
else:
raise psycopg2.ProgrammingError(
- "unrecognized replication type: %s" % repr(slot_type))
+ f"unrecognized replication type: {repr(slot_type)}")
self.execute(command)
def drop_replication_slot(self, slot_name):
"""Drop streaming replication slot."""
- command = "DROP_REPLICATION_SLOT %s" % quote_ident(slot_name, self)
+ command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}"
self.execute(command)
def start_replication(
@@ -586,7 +570,7 @@ def start_replication(
if slot_type == REPLICATION_LOGICAL:
if slot_name:
- command += "SLOT %s " % quote_ident(slot_name, self)
+ command += f"SLOT {quote_ident(slot_name, self)} "
else:
raise psycopg2.ProgrammingError(
"slot name is required for logical replication")
@@ -595,19 +579,18 @@ def start_replication(
elif slot_type == REPLICATION_PHYSICAL:
if slot_name:
- command += "SLOT %s " % quote_ident(slot_name, self)
+ command += f"SLOT {quote_ident(slot_name, self)} "
# don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX
else:
raise psycopg2.ProgrammingError(
- "unrecognized replication type: %s" % repr(slot_type))
+ f"unrecognized replication type: {repr(slot_type)}")
if type(start_lsn) is str:
lsn = start_lsn.split('/')
- lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16))
+ lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}"
else:
- lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF,
- start_lsn & 0xFFFFFFFF)
+ lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}"
command += lsn
@@ -616,7 +599,7 @@ def start_replication(
raise psycopg2.ProgrammingError(
"cannot specify timeline for logical replication")
- command += " TIMELINE %d" % timeline
+ command += f" TIMELINE {timeline}"
if options:
if slot_type == REPLICATION_PHYSICAL:
@@ -627,7 +610,7 @@ def start_replication(
for k, v in options.items():
if not command.endswith('('):
command += ", "
- command += "%s %s" % (quote_ident(k, self), _A(str(v)))
+ command += f"{quote_ident(k, self)} {_A(str(v))}"
command += ")"
self.start_replication_expert(
@@ -640,7 +623,7 @@ def fileno(self):
# a dbtype and adapter for Python UUID type
-class UUID_adapter(object):
+class UUID_adapter:
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
.. __: https://docs.python.org/library/uuid.html
@@ -655,10 +638,10 @@ def __conform__(self, proto):
return self
def getquoted(self):
- return ("'%s'::uuid" % self._uuid).encode('utf8')
+ return (f"'{self._uuid}'::uuid").encode('utf8')
def __str__(self):
- return "'%s'::uuid" % self._uuid
+ return f"'{self._uuid}'::uuid"
def register_uuid(oids=None, conn_or_curs=None):
@@ -695,7 +678,7 @@ def register_uuid(oids=None, conn_or_curs=None):
# a type, dbtype and adapter for PostgreSQL inet type
-class Inet(object):
+class Inet:
"""Wrap a string to allow for correct SQL-quoting of inet values.
Note that this adapter does NOT check the passed value to make
@@ -707,7 +690,7 @@ def __init__(self, addr):
self.addr = addr
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self.addr)
+ return f"{self.__class__.__name__}({self.addr!r})"
def prepare(self, conn):
self._conn = conn
@@ -780,7 +763,7 @@ def wait_select(conn):
elif state == POLL_WRITE:
select.select([], [conn.fileno()], [])
else:
- raise conn.OperationalError("bad state from poll: %s" % state)
+ raise conn.OperationalError(f"bad state from poll: {state}")
except KeyboardInterrupt:
conn.cancel()
# the loop will be broken by a server error
@@ -802,7 +785,7 @@ def _solve_conn_curs(conn_or_curs):
return conn, curs
-class HstoreAdapter(object):
+class HstoreAdapter:
"""Adapt a Python dict to the hstore syntax."""
def __init__(self, wrapped):
self.wrapped = wrapped
@@ -882,7 +865,7 @@ def parse(self, s, cur, _bsdec=_re.compile(r"\\(.)")):
for m in self._re_hstore.finditer(s):
if m is None or m.start() != start:
raise psycopg2.InterfaceError(
- "error parsing hstore pair at char %d" % start)
+ f"error parsing hstore pair at char {start}")
k = _bsdec.sub(r'\1', m.group(1))
v = m.group(2)
if v is not None:
@@ -893,7 +876,7 @@ def parse(self, s, cur, _bsdec=_re.compile(r"\\(.)")):
if start < len(s):
raise psycopg2.InterfaceError(
- "error parsing hstore: unparsed data after char %d" % start)
+ f"error parsing hstore: unparsed data after char {start}")
return rv
@@ -921,12 +904,11 @@ def get_oids(self, conn_or_curs):
rv0, rv1 = [], []
# get the oid for the hstore
- curs.execute("""\
-SELECT t.oid, %s
+ curs.execute(f"""SELECT t.oid, {typarray}
FROM pg_type t JOIN pg_namespace ns
ON typnamespace = ns.oid
WHERE typname = 'hstore';
-""" % typarray)
+""")
for oids in curs:
rv0.append(oids[0])
rv1.append(oids[1])
@@ -990,12 +972,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
array_oid = tuple([x for x in array_oid if x])
# create and register the typecaster
- if PY2 and unicode:
- cast = HstoreAdapter.parse_unicode
- else:
- cast = HstoreAdapter.parse
-
- HSTORE = _ext.new_type(oid, "HSTORE", cast)
+ HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse)
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
_ext.register_adapter(dict, HstoreAdapter)
@@ -1004,7 +981,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
-class CompositeCaster(object):
+class CompositeCaster:
"""Helps conversion of a PostgreSQL composite type into a Python object.
The class is usually created by the `register_composite()` function.
@@ -1025,7 +1002,7 @@ def __init__(self, name, oid, attrs, array_oid=None, schema=None):
self.typecaster = _ext.new_type((oid,), name, self.parse)
if array_oid:
self.array_typecaster = _ext.new_array_type(
- (array_oid,), "%sARRAY" % name, self.typecaster)
+ (array_oid,), f"{name}ARRAY", self.typecaster)
else:
self.array_typecaster = None
@@ -1057,7 +1034,7 @@ def make(self, values):
return self._ctor(values)
_re_tokenize = _re.compile(r"""
-\(? ([,)]) # an empty token, representing NULL
+ \(? ([,)]) # an empty token, representing NULL
| \(? " ((?: [^"] | "")*) " [,)] # or a quoted string
| \(? ([^",)]+) [,)] # or an unquoted string
""", _re.VERBOSE)
@@ -1069,7 +1046,7 @@ def tokenize(self, s):
rv = []
for m in self._re_tokenize.finditer(s):
if m is None:
- raise psycopg2.InterfaceError("can't parse type: %r" % s)
+ raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
if m.group(1) is not None:
rv.append(None)
elif m.group(2) is not None:
@@ -1080,6 +1057,7 @@ def tokenize(self, s):
return rv
def _create_type(self, name, attnames):
+ name = _re_clean.sub('_', name)
self.type = namedtuple(name, attnames)
self._ctor = self.type._make
@@ -1117,14 +1095,46 @@ def _from_db(self, name, conn_or_curs):
recs = curs.fetchall()
+ if not recs:
+ # The above algorithm doesn't work for a customized search_path
+ # (#1487). The implementation below works better but, to guarantee
+ # backwards compatibility, use it only if the original one failed.
+ try:
+ savepoint = False
+ # Because we executed statements earlier, we are either INTRANS
+ # or we are IDLE only if the transaction is autocommit, in
+ # which case we don't need the savepoint anyway.
+ if conn.status == _ext.STATUS_IN_TRANSACTION:
+ curs.execute("SAVEPOINT register_type")
+ savepoint = True
+
+ curs.execute("""\
+SELECT t.oid, %s, attname, atttypid, typname, nspname
+FROM pg_type t
+JOIN pg_namespace ns ON typnamespace = ns.oid
+JOIN pg_attribute a ON attrelid = typrelid
+WHERE t.oid = %%s::regtype
+ AND attnum > 0 AND NOT attisdropped
+ORDER BY attnum;
+""" % typarray, (name, ))
+ except psycopg2.ProgrammingError:
+ pass
+ else:
+ recs = curs.fetchall()
+ if recs:
+ tname = recs[0][4]
+ schema = recs[0][5]
+ finally:
+ if savepoint:
+ curs.execute("ROLLBACK TO SAVEPOINT register_type")
+
# revert the status of the connection as before the command
- if (conn_status != _ext.STATUS_IN_TRANSACTION
- and not conn.autocommit):
+ if conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit:
conn.rollback()
if not recs:
raise psycopg2.ProgrammingError(
- "PostgreSQL type '%s' not found" % name)
+ f"PostgreSQL type '{name}' not found")
type_oid = recs[0][0]
array_oid = recs[0][1]
@@ -1223,11 +1233,11 @@ def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False
compose the query.
- If the *argslist* items are sequences it should contain positional
- placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)``" if there
- are constants value...).
+ placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)"`` if there
+ are constant values...).
- If the *argslist* items are mappings it should contain named
- placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
+ placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
If not specified, assume the arguments are sequence and use a simple
positional template (i.e. ``(%s, %s, ...)``), with the number of
@@ -1323,3 +1333,8 @@ def _split_sql(sql):
raise ValueError("the query doesn't contain any '%s' placeholder")
return pre, post
+
+
+# ascii except alnum and underscore
+_re_clean = _re.compile(
+ '[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/pool.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/pool.py
index 30a29c33..9d67d68e 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/pool.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/pool.py
@@ -5,7 +5,7 @@
# psycopg/pool.py - pooling code for psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -33,7 +33,7 @@ class PoolError(psycopg2.Error):
pass
-class AbstractConnectionPool(object):
+class AbstractConnectionPool:
"""Generic key-based pooling code."""
def __init__(self, minconn, maxconn, *args, **kwargs):
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/sql.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/sql.py
index 68834522..69b352b7 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/sql.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/sql.py
@@ -4,7 +4,7 @@
# psycopg/sql.py - SQL composition utility module
#
# Copyright (C) 2016-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -27,13 +27,12 @@
import string
from psycopg2 import extensions as ext
-from psycopg2.compat import PY3, string_types
_formatter = string.Formatter()
-class Composable(object):
+class Composable:
"""
Abstract base class for objects that can be used to compose an SQL string.
@@ -51,7 +50,7 @@ def __init__(self, wrapped):
self._wrapped = wrapped
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self._wrapped)
+ return f"{self.__class__.__name__}({self._wrapped!r})"
def as_string(self, context):
"""
@@ -107,10 +106,10 @@ def __init__(self, seq):
for i in seq:
if not isinstance(i, Composable):
raise TypeError(
- "Composed elements must be Composable, got %r instead" % i)
+ f"Composed elements must be Composable, got {i!r} instead")
wrapped.append(i)
- super(Composed, self).__init__(wrapped)
+ super().__init__(wrapped)
@property
def seq(self):
@@ -148,7 +147,7 @@ def join(self, joiner):
"foo", "bar"
"""
- if isinstance(joiner, string_types):
+ if isinstance(joiner, str):
joiner = SQL(joiner)
elif not isinstance(joiner, SQL):
raise TypeError(
@@ -180,9 +179,9 @@ class SQL(Composable):
select "foo", "bar" from "table"
"""
def __init__(self, string):
- if not isinstance(string, string_types):
+ if not isinstance(string, str):
raise TypeError("SQL values must be strings")
- super(SQL, self).__init__(string)
+ super().__init__(string)
@property
def string(self):
@@ -324,10 +323,10 @@ def __init__(self, *strings):
raise TypeError("Identifier cannot be empty")
for s in strings:
- if not isinstance(s, string_types):
+ if not isinstance(s, str):
raise TypeError("SQL identifier parts must be strings")
- super(Identifier, self).__init__(strings)
+ super().__init__(strings)
@property
def strings(self):
@@ -345,9 +344,7 @@ def string(self):
"the Identifier wraps more than one than one string")
def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- ', '.join(map(repr, self._wrapped)))
+ return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})"
def as_string(self, context):
return '.'.join(ext.quote_ident(s, context) for s in self._wrapped)
@@ -392,7 +389,7 @@ def as_string(self, context):
a.prepare(conn)
rv = a.getquoted()
- if PY3 and isinstance(rv, bytes):
+ if isinstance(rv, bytes):
rv = rv.decode(ext.encodings[conn.encoding])
return rv
@@ -426,14 +423,14 @@ class Placeholder(Composable):
"""
def __init__(self, name=None):
- if isinstance(name, string_types):
+ if isinstance(name, str):
if ')' in name:
- raise ValueError("invalid name: %r" % name)
+ raise ValueError(f"invalid name: {name!r}")
elif name is not None:
- raise TypeError("expected string or None as name, got %r" % name)
+ raise TypeError(f"expected string or None as name, got {name!r}")
- super(Placeholder, self).__init__(name)
+ super().__init__(name)
@property
def name(self):
@@ -441,12 +438,14 @@ def name(self):
return self._wrapped
def __repr__(self):
- return "Placeholder(%r)" % (
- self._wrapped if self._wrapped is not None else '',)
+ if self._wrapped is None:
+ return f"{self.__class__.__name__}()"
+ else:
+ return f"{self.__class__.__name__}({self._wrapped!r})"
def as_string(self, context):
if self._wrapped is not None:
- return "%%(%s)s" % self._wrapped
+ return f"%({self._wrapped})s"
else:
return "%s"
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/tz.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/tz.py
index 555fe762..d88ca37c 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/tz.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2/tz.py
@@ -7,7 +7,7 @@
# psycopg/tz.py - tzinfo implementation
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -45,9 +45,13 @@ class FixedOffsetTimezone(datetime.tzinfo):
offset and name that instance will be returned. This saves memory and
improves comparability.
+ .. versionchanged:: 2.9
+
+ The constructor can take either a timedelta or a number of minutes of
+ offset. Previously only minutes were supported.
+
.. __: https://docs.python.org/library/datetime.html
"""
-
_name = None
_offset = ZERO
@@ -55,27 +59,41 @@ class FixedOffsetTimezone(datetime.tzinfo):
def __init__(self, offset=None, name=None):
if offset is not None:
- self._offset = datetime.timedelta(minutes=offset)
+ if not isinstance(offset, datetime.timedelta):
+ offset = datetime.timedelta(minutes=offset)
+ self._offset = offset
if name is not None:
self._name = name
def __new__(cls, offset=None, name=None):
- """Return a suitable instance created earlier if it exists"""
+ """Return a suitable instance created earlier if it exists
+ """
key = (offset, name)
try:
return cls._cache[key]
except KeyError:
- tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
+ tz = super().__new__(cls, offset, name)
cls._cache[key] = tz
return tz
def __repr__(self):
- offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
- return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" % (offset_mins, self._name)
+ return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \
+ % (self._offset, self._name)
+
+ def __eq__(self, other):
+ if isinstance(other, FixedOffsetTimezone):
+ return self._offset == other._offset
+ else:
+ return NotImplemented
+
+ def __ne__(self, other):
+ if isinstance(other, FixedOffsetTimezone):
+ return self._offset != other._offset
+ else:
+ return NotImplemented
def __getinitargs__(self):
- offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
- return offset_mins, self._name
+ return self._offset, self._name
def utcoffset(self, dt):
return self._offset
@@ -83,14 +101,16 @@ def utcoffset(self, dt):
def tzname(self, dt):
if self._name is not None:
return self._name
- else:
- seconds = self._offset.seconds + self._offset.days * 86400
- hours, seconds = divmod(seconds, 3600)
- minutes = seconds / 60
- if minutes:
- return "%+03d:%d" % (hours, minutes)
- else:
- return "%+03d" % hours
+
+ minutes, seconds = divmod(self._offset.total_seconds(), 60)
+ hours, minutes = divmod(minutes, 60)
+ rv = "%+03d" % hours
+ if minutes or seconds:
+ rv += ":%02d" % minutes
+ if seconds:
+ rv += ":%02d" % seconds
+
+ return rv
def dst(self, dt):
return ZERO
@@ -109,7 +129,6 @@ class LocalTimezone(datetime.tzinfo):
This is the exact implementation from the Python 2.3 documentation.
"""
-
def utcoffset(self, dt):
if self._isdst(dt):
return DSTOFFSET
@@ -126,7 +145,9 @@ def tzname(self, dt):
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
- tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1)
+ tt = (dt.year, dt.month, dt.day,
+ dt.hour, dt.minute, dt.second,
+ dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1
new file mode 100644
index 00000000..2e2b9408
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libcrypto-2ade47cd.so.1.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libcrypto-2ade47cd.so.1.1
new file mode 100644
index 00000000..993c9b98
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libcrypto-2ade47cd.so.1.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2
new file mode 100644
index 00000000..262b9667
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1
new file mode 100644
index 00000000..1a905987
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5
new file mode 100644
index 00000000..4026ca82
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkrb5-6824148d.so.3.3 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkrb5-6824148d.so.3.3
new file mode 100644
index 00000000..e623ab01
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkrb5-6824148d.so.3.3 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkrb5support-f4e34ad2.so.0.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkrb5support-f4e34ad2.so.0.1
new file mode 100644
index 00000000..4dd88b41
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libkrb5support-f4e34ad2.so.0.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/liblber-2-f65b1f9f.4.so.2.11.7 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/liblber-2-f65b1f9f.4.so.2.11.7
new file mode 100644
index 00000000..25048a19
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/liblber-2-f65b1f9f.4.so.2.11.7 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libldap_r-2-f56d324d.4.so.2.11.7 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libldap_r-2-f56d324d.4.so.2.11.7
new file mode 100644
index 00000000..8a98ed06
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libldap_r-2-f56d324d.4.so.2.11.7 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0
new file mode 100644
index 00000000..9c8c5f52
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libpq-e85f78f2.so.5.15 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libpq-e85f78f2.so.5.15
new file mode 100644
index 00000000..f0a54073
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libpq-e85f78f2.so.5.15 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libsasl2-0f265e47.so.3.0.0 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libsasl2-0f265e47.so.3.0.0
new file mode 100644
index 00000000..da3f4bb3
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libsasl2-0f265e47.so.3.0.0 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libselinux-0922c95c.so.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libselinux-0922c95c.so.1
new file mode 100644
index 00000000..171b8366
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libselinux-0922c95c.so.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libssl-3a880ada.so.1.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libssl-3a880ada.so.1.1
new file mode 100644
index 00000000..c898206e
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-extract/psycopg2_binary.libs/libssl-3a880ada.so.1.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/__init__.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/__init__.py
index 5198b73c..59a89386 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/__init__.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/__init__.py
@@ -12,14 +12,14 @@
.. _Python: https://www.python.org/
:Groups:
-* `Connections creation`: connect
-* `Value objects constructors`: Binary, Date, DateFromTicks, Time,
+ * `Connections creation`: connect
+ * `Value objects constructors`: Binary, Date, DateFromTicks, Time,
TimeFromTicks, Timestamp, TimestampFromTicks
"""
# psycopg/__init__.py - initialization of the psycopg module
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -48,53 +48,31 @@
# Import the DBAPI-2.0 stuff into top-level module.
-from psycopg2._psycopg import ( # noqa
- BINARY,
- NUMBER,
- STRING,
- DATETIME,
- ROWID,
- Binary,
- Date,
- Time,
- Timestamp,
- DateFromTicks,
- TimeFromTicks,
- TimestampFromTicks,
- Error,
- Warning,
- DataError,
- DatabaseError,
- ProgrammingError,
- IntegrityError,
- InterfaceError,
- InternalError,
- NotSupportedError,
- OperationalError,
- _connect,
- apilevel,
- threadsafety,
- paramstyle,
- __version__,
- __libpq_version__,
-)
-from psycopg2 import tz # noqa
+from psycopg2._psycopg import ( # noqa
+ BINARY, NUMBER, STRING, DATETIME, ROWID,
+
+ Binary, Date, Time, Timestamp,
+ DateFromTicks, TimeFromTicks, TimestampFromTicks,
+
+ Error, Warning, DataError, DatabaseError, ProgrammingError, IntegrityError,
+ InterfaceError, InternalError, NotSupportedError, OperationalError,
+
+ _connect, apilevel, threadsafety, paramstyle,
+ __version__, __libpq_version__,
+)
# Register default adapters.
from psycopg2 import extensions as _ext
-
_ext.register_adapter(tuple, _ext.SQL_IN)
_ext.register_adapter(type(None), _ext.NoneAdapter)
# Register the Decimal adapter here instead of in the C layer.
# This way a new class is registered for each sub-interpreter.
# See ticket #52
-from decimal import Decimal # noqa
-from psycopg2._psycopg import Decimal as Adapter # noqa
-
+from decimal import Decimal # noqa
+from psycopg2._psycopg import Decimal as Adapter # noqa
_ext.register_adapter(Decimal, Adapter)
del Decimal, Adapter
@@ -135,13 +113,10 @@ def connect(dsn=None, connection_factory=None, cursor_factory=None, **kwargs):
"""
kwasync = {}
- if "async" in kwargs:
- kwasync["async"] = kwargs.pop("async")
- if "async_" in kwargs:
- kwasync["async_"] = kwargs.pop("async_")
-
- if dsn is None and not kwargs:
- raise TypeError("missing dsn and no parameters")
+ if 'async' in kwargs:
+ kwasync['async'] = kwargs.pop('async')
+ if 'async_' in kwargs:
+ kwasync['async_'] = kwargs.pop('async_')
dsn = _ext.make_dsn(dsn, **kwargs)
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_ipaddress.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_ipaddress.py
index 994cf9e8..d38566c8 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_ipaddress.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_ipaddress.py
@@ -4,7 +4,7 @@
# psycopg/_ipaddress.py - Ipaddres-based network types adaptation
#
# Copyright (C) 2016-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -24,8 +24,8 @@
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
-from psycopg2.extensions import new_type, new_array_type, register_type, register_adapter, QuotedString
-from psycopg2.compat import text_type
+from psycopg2.extensions import (
+ new_type, new_array_type, register_type, register_adapter, QuotedString)
# The module is imported on register_ipaddress
ipaddress = None
@@ -58,16 +58,17 @@ def register_ipaddress(conn_or_curs=None):
for c in _casters:
register_type(c, conn_or_curs)
- for t in [ipaddress.IPv4Interface, ipaddress.IPv6Interface, ipaddress.IPv4Network, ipaddress.IPv6Network]:
+ for t in [ipaddress.IPv4Interface, ipaddress.IPv6Interface,
+ ipaddress.IPv4Network, ipaddress.IPv6Network]:
register_adapter(t, adapt_ipaddress)
def _make_casters():
- inet = new_type((869,), "INET", cast_interface)
- ainet = new_array_type((1041,), "INET[]", inet)
+ inet = new_type((869,), 'INET', cast_interface)
+ ainet = new_array_type((1041,), 'INET[]', inet)
- cidr = new_type((650,), "CIDR", cast_network)
- acidr = new_array_type((651,), "CIDR[]", cidr)
+ cidr = new_type((650,), 'CIDR', cast_network)
+ acidr = new_array_type((651,), 'CIDR[]', cidr)
return [inet, ainet, cidr, acidr]
@@ -76,13 +77,13 @@ def cast_interface(s, cur=None):
if s is None:
return None
# Py2 version force the use of unicode. meh.
- return ipaddress.ip_interface(text_type(s))
+ return ipaddress.ip_interface(str(s))
def cast_network(s, cur=None):
if s is None:
return None
- return ipaddress.ip_network(text_type(s))
+ return ipaddress.ip_network(str(s))
def adapt_ipaddress(obj):
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_json.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_json.py
index a599baf9..95024223 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_json.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_json.py
@@ -8,7 +8,7 @@
# psycopg/_json.py - Implementation of the JSON adaptation objects
#
# Copyright (C) 2012-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -32,7 +32,6 @@
from psycopg2._psycopg import ISQLQuote, QuotedString
from psycopg2._psycopg import new_type, new_array_type, register_type
-from psycopg2.compat import PY2
# oids from PostgreSQL 9.2
@@ -44,7 +43,7 @@
JSONBARRAY_OID = 3807
-class Json(object):
+class Json:
"""
An `~psycopg2.extensions.ISQLQuote` wrapper to adapt a Python object to
:sql:`json` data type.
@@ -54,7 +53,6 @@ class Json(object):
used.
"""
-
def __init__(self, adapted, dumps=None):
self.adapted = adapted
self._conn = None
@@ -83,19 +81,13 @@ def getquoted(self):
qs.prepare(self._conn)
return qs.getquoted()
- if PY2:
-
- def __str__(self):
- return self.getquoted()
-
- else:
-
- def __str__(self):
- # getquoted is binary in Py3
- return self.getquoted().decode("ascii", "replace")
+ def __str__(self):
+ # getquoted is binary
+ return self.getquoted().decode('ascii', 'replace')
-def register_json(conn_or_curs=None, globally=False, loads=None, oid=None, array_oid=None, name="json"):
+def register_json(conn_or_curs=None, globally=False, loads=None,
+ oid=None, array_oid=None, name='json'):
"""Create and register typecasters converting :sql:`json` type to Python objects.
:param conn_or_curs: a connection or cursor used to find the :sql:`json`
@@ -122,7 +114,8 @@ def register_json(conn_or_curs=None, globally=False, loads=None, oid=None, array
if oid is None:
oid, array_oid = _get_json_oids(conn_or_curs, name)
- JSON, JSONARRAY = _create_json_typecasters(oid, array_oid, loads=loads, name=name.upper())
+ JSON, JSONARRAY = _create_json_typecasters(
+ oid, array_oid, loads=loads, name=name.upper())
register_type(JSON, not globally and conn_or_curs or None)
@@ -141,9 +134,8 @@ def register_default_json(conn_or_curs=None, globally=False, loads=None):
for the default :sql:`json` type without querying the database.
All the parameters have the same meaning of `register_json()`.
"""
- return register_json(
- conn_or_curs=conn_or_curs, globally=globally, loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID
- )
+ return register_json(conn_or_curs=conn_or_curs, globally=globally,
+ loads=loads, oid=JSON_OID, array_oid=JSONARRAY_OID)
def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
@@ -155,12 +147,11 @@ def register_default_jsonb(conn_or_curs=None, globally=False, loads=None):
PostgreSQL 9.4 and following versions. All the parameters have the same
meaning of `register_json()`.
"""
- return register_json(
- conn_or_curs=conn_or_curs, globally=globally, loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name="jsonb"
- )
+ return register_json(conn_or_curs=conn_or_curs, globally=globally,
+ loads=loads, oid=JSONB_OID, array_oid=JSONBARRAY_OID, name='jsonb')
-def _create_json_typecasters(oid, array_oid, loads=None, name="JSON"):
+def _create_json_typecasters(oid, array_oid, loads=None, name='JSON'):
"""Create typecasters for json data type."""
if loads is None:
loads = json.loads
@@ -170,16 +161,16 @@ def typecast_json(s, cur):
return None
return loads(s)
- JSON = new_type((oid,), name, typecast_json)
+ JSON = new_type((oid, ), name, typecast_json)
if array_oid is not None:
- JSONARRAY = new_array_type((array_oid,), "%sARRAY" % name, JSON)
+ JSONARRAY = new_array_type((array_oid, ), f"{name}ARRAY", JSON)
else:
JSONARRAY = None
return JSON, JSONARRAY
-def _get_json_oids(conn_or_curs, name="json"):
+def _get_json_oids(conn_or_curs, name='json'):
# lazy imports
from psycopg2.extensions import STATUS_IN_TRANSACTION
from psycopg2.extras import _solve_conn_curs
@@ -193,7 +184,9 @@ def _get_json_oids(conn_or_curs, name="json"):
typarray = conn.info.server_version >= 80300 and "typarray" or "NULL"
# get the oid for the hstore
- curs.execute("SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;" % typarray, (name,))
+ curs.execute(
+ "SELECT t.oid, %s FROM pg_type t WHERE t.typname = %%s;"
+ % typarray, (name,))
r = curs.fetchone()
# revert the status of the connection as before the command
@@ -201,6 +194,6 @@ def _get_json_oids(conn_or_curs, name="json"):
conn.rollback()
if not r:
- raise conn.ProgrammingError("%s data type not found" % name)
+ raise conn.ProgrammingError(f"{name} data type not found")
return r
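(A minimal usage sketch for the json/jsonb adaptation modernised above; the DSN and the docs table are placeholders, not part of this patch.)

    import json
    import psycopg2
    from psycopg2.extras import Json, register_default_jsonb

    conn = psycopg2.connect("dbname=test")  # placeholder DSN

    # Json adapts a Python object to the json data type on the way in;
    # a custom dumps callable may be supplied.
    with conn.cursor() as curs:
        curs.execute("INSERT INTO docs (payload) VALUES (%s)",
                     [Json({"a": 1}, dumps=json.dumps)])

    # register_default_jsonb installs a typecaster so jsonb values come
    # back as Python objects decoded by the given loads callable.
    register_default_jsonb(conn_or_curs=conn, loads=json.loads)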
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_lru_cache.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_lru_cache.py
deleted file mode 100644
index 1e2c52d0..00000000
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_lru_cache.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""
-LRU cache implementation for Python 2.7
-
-Ported from http://code.activestate.com/recipes/578078/ and simplified for our
-use (only support maxsize > 0 and positional arguments).
-"""
-
-from collections import namedtuple
-from functools import update_wrapper
-from threading import RLock
-
-_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
-
-
-def lru_cache(maxsize=100):
- """Least-recently-used cache decorator.
-
- Arguments to the cached function must be hashable.
-
- See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
-
- """
- def decorating_function(user_function):
-
- cache = dict()
- stats = [0, 0] # make statistics updateable non-locally
- HITS, MISSES = 0, 1 # names for the stats fields
- cache_get = cache.get # bound method to lookup key or return None
- _len = len # localize the global len() function
- lock = RLock() # linkedlist updates aren't threadsafe
- root = [] # root of the circular doubly linked list
- root[:] = [root, root, None, None] # initialize by pointing to self
- nonlocal_root = [root] # make updateable non-locally
- PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
-
- assert maxsize and maxsize > 0, "maxsize %s not supported" % maxsize
-
- def wrapper(*args):
- # size limited caching that tracks accesses by recency
- key = args
- with lock:
- link = cache_get(key)
- if link is not None:
- # record recent use of the key by moving it to the
- # front of the list
- root, = nonlocal_root
- link_prev, link_next, key, result = link
- link_prev[NEXT] = link_next
- link_next[PREV] = link_prev
- last = root[PREV]
- last[NEXT] = root[PREV] = link
- link[PREV] = last
- link[NEXT] = root
- stats[HITS] += 1
- return result
- result = user_function(*args)
- with lock:
- root, = nonlocal_root
- if key in cache:
- # getting here means that this same key was added to the
- # cache while the lock was released. since the link
- # update is already done, we need only return the
- # computed result and update the count of misses.
- pass
- elif _len(cache) >= maxsize:
- # use the old root to store the new key and result
- oldroot = root
- oldroot[KEY] = key
- oldroot[RESULT] = result
- # empty the oldest link and make it the new root
- root = nonlocal_root[0] = oldroot[NEXT]
- oldkey = root[KEY]
- # oldvalue = root[RESULT]
- root[KEY] = root[RESULT] = None
- # now update the cache dictionary for the new links
- del cache[oldkey]
- cache[key] = oldroot
- else:
- # put result in a new link at the front of the list
- last = root[PREV]
- link = [last, root, key, result]
- last[NEXT] = root[PREV] = cache[key] = link
- stats[MISSES] += 1
- return result
-
- def cache_info():
- """Report cache statistics"""
- with lock:
- return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
-
- def cache_clear():
- """Clear the cache and cache statistics"""
- with lock:
- cache.clear()
- root = nonlocal_root[0]
- root[:] = [root, root, None, None]
- stats[:] = [0, 0]
-
- wrapper.__wrapped__ = user_function
- wrapper.cache_info = cache_info
- wrapper.cache_clear = cache_clear
- return update_wrapper(wrapper, user_function)
-
- return decorating_function
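(The deleted module was a Python 2 backport; on Python 3 the stdlib functools.lru_cache covers the same ground. An illustrative sketch with a hypothetical helper:)

    from functools import lru_cache

    @lru_cache(maxsize=512)
    def normalise(postcode):
        # arguments must be hashable, as with the deleted backport
        return postcode.replace(" ", "").upper()

    normalise("ab1 2cd")
    print(normalise.cache_info())  # CacheInfo(hits=0, misses=1, maxsize=512, currsize=1)
    normalise.cache_clear()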
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_psycopg.cpython-38-x86_64-linux-gnu.so b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_psycopg.cpython-38-x86_64-linux-gnu.so
deleted file mode 100755
index eaf44a4e..00000000
Binary files a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_psycopg.cpython-38-x86_64-linux-gnu.so and /dev/null differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_psycopg.so b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_psycopg.so
new file mode 100644
index 00000000..5421979d
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_psycopg.so differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_range.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_range.py
index ea265156..64bae073 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_range.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/_range.py
@@ -5,7 +5,7 @@
# psycopg/_range.py - Implementation of the Range type and adaptation
#
# Copyright (C) 2012-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -30,10 +30,9 @@
from psycopg2._psycopg import ProgrammingError, InterfaceError
from psycopg2.extensions import ISQLQuote, adapt, register_adapter
from psycopg2.extensions import new_type, new_array_type, register_type
-from psycopg2.compat import string_types
-class Range(object):
+class Range:
"""Python representation for a PostgreSQL |range|_ type.
:param lower: lower bound for the range. `!None` means unbound
@@ -48,7 +47,7 @@ class Range(object):
def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
if not empty:
if bounds not in ('[)', '(]', '()', '[]'):
- raise ValueError("bound flags not valid: %r" % bounds)
+ raise ValueError(f"bound flags not valid: {bounds!r}")
self._lower = lower
self._upper = upper
@@ -58,9 +57,9 @@ def __init__(self, lower=None, upper=None, bounds='[)', empty=False):
def __repr__(self):
if self._bounds is None:
- return "%s(empty=True)" % self.__class__.__name__
+ return f"{self.__class__.__name__}(empty=True)"
else:
- return "%s(%r, %r, %r)" % (self.__class__.__name__,
+ return "{}({!r}, {!r}, {!r})".format(self.__class__.__name__,
self._lower, self._upper, self._bounds)
def __str__(self):
@@ -144,10 +143,6 @@ def __contains__(self, x):
def __bool__(self):
return self._bounds is not None
- def __nonzero__(self):
- # Python 2 compatibility
- return type(self).__bool__(self)
-
def __eq__(self, other):
if not isinstance(other, Range):
return False
@@ -239,7 +234,7 @@ def register_range(pgrange, pyrange, conn_or_curs, globally=False):
return caster
-class RangeAdapter(object):
+class RangeAdapter:
"""`ISQLQuote` adapter for `Range` subclasses.
This is an abstract class: concrete classes must set a `name` class
@@ -287,7 +282,7 @@ def getquoted(self):
+ b", '" + r._bounds.encode('utf8') + b"')"
-class RangeCaster(object):
+class RangeCaster:
"""Helper class to convert between `Range` and PostgreSQL range types.
Objects of this class are usually created by `register_range()`. Manual
@@ -315,7 +310,7 @@ def _create_ranges(self, pgrange, pyrange):
# an implementation detail and is not documented. It is currently used
# for the numeric ranges.
self.adapter = None
- if isinstance(pgrange, string_types):
+ if isinstance(pgrange, str):
self.adapter = type(pgrange, (RangeAdapter,), {})
self.adapter.name = pgrange
else:
@@ -332,7 +327,7 @@ def _create_ranges(self, pgrange, pyrange):
self.range = None
try:
- if isinstance(pyrange, string_types):
+ if isinstance(pyrange, str):
self.range = type(pyrange, (Range,), {})
if issubclass(pyrange, Range) and pyrange is not Range:
self.range = pyrange
@@ -368,33 +363,54 @@ def _from_db(self, name, pyrange, conn_or_curs):
schema = 'public'
# get the type oid and attributes
- try:
- curs.execute("""\
-select rngtypid, rngsubtype,
- (select typarray from pg_type where oid = rngtypid)
+ curs.execute("""\
+select rngtypid, rngsubtype, typarray
from pg_range r
join pg_type t on t.oid = rngtypid
join pg_namespace ns on ns.oid = typnamespace
where typname = %s and ns.nspname = %s;
""", (tname, schema))
+ rec = curs.fetchone()
- except ProgrammingError:
- if not conn.autocommit:
- conn.rollback()
- raise
- else:
- rec = curs.fetchone()
+ if not rec:
+ # The above algorithm doesn't work for a customized search_path
+ # (#1487). The implementation below works better but, to guarantee
+ # backwards compatibility, it is used only if the original query failed.
+ try:
+ savepoint = False
+ # Because we executed statements earlier, we are either INTRANS,
+ # or IDLE only when the connection is in autocommit mode, in
+ # which case we don't need the savepoint anyway.
+ if conn.status == STATUS_IN_TRANSACTION:
+ curs.execute("SAVEPOINT register_type")
+ savepoint = True
+
+ curs.execute("""\
+SELECT rngtypid, rngsubtype, typarray, typname, nspname
+from pg_range r
+join pg_type t on t.oid = rngtypid
+join pg_namespace ns on ns.oid = typnamespace
+WHERE t.oid = %s::regtype
+""", (name, ))
+ except ProgrammingError:
+ pass
+ else:
+ rec = curs.fetchone()
+ if rec:
+ tname, schema = rec[3:]
+ finally:
+ if savepoint:
+ curs.execute("ROLLBACK TO SAVEPOINT register_type")
- # revert the status of the connection as before the command
- if (conn_status != STATUS_IN_TRANSACTION
- and not conn.autocommit):
- conn.rollback()
+ # revert the status of the connection as before the command
+ if conn_status != STATUS_IN_TRANSACTION and not conn.autocommit:
+ conn.rollback()
if not rec:
raise ProgrammingError(
- "PostgreSQL type '%s' not found" % name)
+ f"PostgreSQL range '{name}' not found")
- type, subtype, array = rec
+ type, subtype, array = rec[:3]
return RangeCaster(name, pyrange,
oid=type, subtype_oid=subtype, array_oid=array)
@@ -402,13 +418,13 @@ def _from_db(self, name, pyrange, conn_or_curs):
_re_range = re.compile(r"""
( \(|\[ ) # lower bound flag
(?: # lower bound:
- " ( (?: [^"] | "")* ) " # - a quoted string
- | ( [^",]+ ) # - or an unquoted string
+ " ( (?: [^"] | "")* ) " # - a quoted string
+ | ( [^",]+ ) # - or an unquoted string
)? # - or empty (not captured)
,
(?: # upper bound:
- " ( (?: [^"] | "")* ) " # - a quoted string
- | ( [^"\)\]]+ ) # - or an unquoted string
+ " ( (?: [^"] | "")* ) " # - a quoted string
+ | ( [^"\)\]]+ ) # - or an unquoted string
)? # - or empty (not captured)
( \)|\] ) # upper bound flag
""", re.VERBOSE)
@@ -424,7 +440,7 @@ def parse(self, s, cur=None):
m = self._re_range.match(s)
if m is None:
- raise InterfaceError("failed to parse range: '%s'" % s)
+ raise InterfaceError(f"failed to parse range: '{s}'")
lower = m.group(3)
if lower is None:
@@ -504,8 +520,7 @@ def getquoted(self):
else:
upper = ''
- return ("'%s%s,%s%s'" % (
- r._bounds[0], lower, upper, r._bounds[1])).encode('ascii')
+ return (f"'{r._bounds[0]}{lower},{upper}{r._bounds[1]}'").encode('ascii')
# TODO: probably won't work with infs, nans and other tricky cases.
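(A minimal sketch of the Range API touched above; 'floatrange' is a hypothetical range type created beforehand with CREATE TYPE ... AS RANGE, and the DSN is a placeholder.)

    import psycopg2
    from psycopg2.extras import Range, register_range

    r = Range(10, 20, bounds="[)")
    print(10 in r, 20 in r)  # True False

    conn = psycopg2.connect("dbname=test")  # placeholder DSN
    caster = register_range("floatrange", "FloatRange", conn)
    FloatRange = caster.range  # the generated Range subclass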
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/compat.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/compat.py
deleted file mode 100644
index 54606a80..00000000
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/compat.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import sys
-
-__all__ = ['string_types', 'text_type', 'lru_cache']
-
-if sys.version_info[0] == 2:
- # Python 2
- PY2 = True
- PY3 = False
- string_types = basestring,
- text_type = unicode
- from ._lru_cache import lru_cache
-
-else:
- # Python 3
- PY2 = False
- PY3 = True
- string_types = str,
- text_type = str
- from functools import lru_cache
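(With compat.py gone, the shims map directly onto Python 3 built-ins; an illustrative equivalence, not part of the patch:)

    from functools import lru_cache  # was: from ._lru_cache import lru_cache

    value = b"postcode".decode("ascii")
    assert isinstance(value, str)  # was: isinstance(value, string_types)
    assert str(value) == value     # was: text_type(value)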
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errorcodes.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errorcodes.py
index 9baceec0..aa646c46 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errorcodes.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errorcodes.py
@@ -1,11 +1,11 @@
-"""Error codes for PostgresSQL
+"""Error codes for PostgreSQL
This module contains symbolic names for all PostgreSQL error codes.
"""
# psycopg2/errorcodes.py - PostgreSQL error codes
#
# Copyright (C) 2006-2019 Johan Dahlin
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -43,7 +43,8 @@ def lookup(code, _cache={}):
tmp = {}
for k, v in globals().items():
if isinstance(v, str) and len(v) in (2, 5):
- tmp[v] = k
+ # Strip trailing underscore used to disambiguate duplicate values
+ tmp[v] = k.rstrip("_")
assert tmp
@@ -56,390 +57,393 @@ def lookup(code, _cache={}):
# autogenerated data: do not edit below this point.
# Error classes
-CLASS_SUCCESSFUL_COMPLETION = "00"
-CLASS_WARNING = "01"
-CLASS_NO_DATA = "02"
-CLASS_SQL_STATEMENT_NOT_YET_COMPLETE = "03"
-CLASS_CONNECTION_EXCEPTION = "08"
-CLASS_TRIGGERED_ACTION_EXCEPTION = "09"
-CLASS_FEATURE_NOT_SUPPORTED = "0A"
-CLASS_INVALID_TRANSACTION_INITIATION = "0B"
-CLASS_LOCATOR_EXCEPTION = "0F"
-CLASS_INVALID_GRANTOR = "0L"
-CLASS_INVALID_ROLE_SPECIFICATION = "0P"
-CLASS_DIAGNOSTICS_EXCEPTION = "0Z"
-CLASS_CASE_NOT_FOUND = "20"
-CLASS_CARDINALITY_VIOLATION = "21"
-CLASS_DATA_EXCEPTION = "22"
-CLASS_INTEGRITY_CONSTRAINT_VIOLATION = "23"
-CLASS_INVALID_CURSOR_STATE = "24"
-CLASS_INVALID_TRANSACTION_STATE = "25"
-CLASS_INVALID_SQL_STATEMENT_NAME = "26"
-CLASS_TRIGGERED_DATA_CHANGE_VIOLATION = "27"
-CLASS_INVALID_AUTHORIZATION_SPECIFICATION = "28"
-CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = "2B"
-CLASS_INVALID_TRANSACTION_TERMINATION = "2D"
-CLASS_SQL_ROUTINE_EXCEPTION = "2F"
-CLASS_INVALID_CURSOR_NAME = "34"
-CLASS_EXTERNAL_ROUTINE_EXCEPTION = "38"
-CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = "39"
-CLASS_SAVEPOINT_EXCEPTION = "3B"
-CLASS_INVALID_CATALOG_NAME = "3D"
-CLASS_INVALID_SCHEMA_NAME = "3F"
-CLASS_TRANSACTION_ROLLBACK = "40"
-CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = "42"
-CLASS_WITH_CHECK_OPTION_VIOLATION = "44"
-CLASS_INSUFFICIENT_RESOURCES = "53"
-CLASS_PROGRAM_LIMIT_EXCEEDED = "54"
-CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = "55"
-CLASS_OPERATOR_INTERVENTION = "57"
-CLASS_SYSTEM_ERROR = "58"
-CLASS_SNAPSHOT_FAILURE = "72"
-CLASS_CONFIGURATION_FILE_ERROR = "F0"
-CLASS_FOREIGN_DATA_WRAPPER_ERROR = "HV"
-CLASS_PL_PGSQL_ERROR = "P0"
-CLASS_INTERNAL_ERROR = "XX"
+CLASS_SUCCESSFUL_COMPLETION = '00'
+CLASS_WARNING = '01'
+CLASS_NO_DATA = '02'
+CLASS_SQL_STATEMENT_NOT_YET_COMPLETE = '03'
+CLASS_CONNECTION_EXCEPTION = '08'
+CLASS_TRIGGERED_ACTION_EXCEPTION = '09'
+CLASS_FEATURE_NOT_SUPPORTED = '0A'
+CLASS_INVALID_TRANSACTION_INITIATION = '0B'
+CLASS_LOCATOR_EXCEPTION = '0F'
+CLASS_INVALID_GRANTOR = '0L'
+CLASS_INVALID_ROLE_SPECIFICATION = '0P'
+CLASS_DIAGNOSTICS_EXCEPTION = '0Z'
+CLASS_CASE_NOT_FOUND = '20'
+CLASS_CARDINALITY_VIOLATION = '21'
+CLASS_DATA_EXCEPTION = '22'
+CLASS_INTEGRITY_CONSTRAINT_VIOLATION = '23'
+CLASS_INVALID_CURSOR_STATE = '24'
+CLASS_INVALID_TRANSACTION_STATE = '25'
+CLASS_INVALID_SQL_STATEMENT_NAME = '26'
+CLASS_TRIGGERED_DATA_CHANGE_VIOLATION = '27'
+CLASS_INVALID_AUTHORIZATION_SPECIFICATION = '28'
+CLASS_DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B'
+CLASS_INVALID_TRANSACTION_TERMINATION = '2D'
+CLASS_SQL_ROUTINE_EXCEPTION = '2F'
+CLASS_INVALID_CURSOR_NAME = '34'
+CLASS_EXTERNAL_ROUTINE_EXCEPTION = '38'
+CLASS_EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39'
+CLASS_SAVEPOINT_EXCEPTION = '3B'
+CLASS_INVALID_CATALOG_NAME = '3D'
+CLASS_INVALID_SCHEMA_NAME = '3F'
+CLASS_TRANSACTION_ROLLBACK = '40'
+CLASS_SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42'
+CLASS_WITH_CHECK_OPTION_VIOLATION = '44'
+CLASS_INSUFFICIENT_RESOURCES = '53'
+CLASS_PROGRAM_LIMIT_EXCEEDED = '54'
+CLASS_OBJECT_NOT_IN_PREREQUISITE_STATE = '55'
+CLASS_OPERATOR_INTERVENTION = '57'
+CLASS_SYSTEM_ERROR = '58'
+CLASS_SNAPSHOT_FAILURE = '72'
+CLASS_CONFIGURATION_FILE_ERROR = 'F0'
+CLASS_FOREIGN_DATA_WRAPPER_ERROR = 'HV'
+CLASS_PL_PGSQL_ERROR = 'P0'
+CLASS_INTERNAL_ERROR = 'XX'
# Class 00 - Successful Completion
-SUCCESSFUL_COMPLETION = "00000"
+SUCCESSFUL_COMPLETION = '00000'
# Class 01 - Warning
-WARNING = "01000"
-NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = "01003"
-STRING_DATA_RIGHT_TRUNCATION = "01004"
-PRIVILEGE_NOT_REVOKED = "01006"
-PRIVILEGE_NOT_GRANTED = "01007"
-IMPLICIT_ZERO_BIT_PADDING = "01008"
-DYNAMIC_RESULT_SETS_RETURNED = "0100C"
-DEPRECATED_FEATURE = "01P01"
+WARNING = '01000'
+NULL_VALUE_ELIMINATED_IN_SET_FUNCTION = '01003'
+STRING_DATA_RIGHT_TRUNCATION_ = '01004'
+PRIVILEGE_NOT_REVOKED = '01006'
+PRIVILEGE_NOT_GRANTED = '01007'
+IMPLICIT_ZERO_BIT_PADDING = '01008'
+DYNAMIC_RESULT_SETS_RETURNED = '0100C'
+DEPRECATED_FEATURE = '01P01'
# Class 02 - No Data (this is also a warning class per the SQL standard)
-NO_DATA = "02000"
-NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED = "02001"
+NO_DATA = '02000'
+NO_ADDITIONAL_DYNAMIC_RESULT_SETS_RETURNED = '02001'
# Class 03 - SQL Statement Not Yet Complete
-SQL_STATEMENT_NOT_YET_COMPLETE = "03000"
+SQL_STATEMENT_NOT_YET_COMPLETE = '03000'
# Class 08 - Connection Exception
-CONNECTION_EXCEPTION = "08000"
-SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION = "08001"
-CONNECTION_DOES_NOT_EXIST = "08003"
-SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION = "08004"
-CONNECTION_FAILURE = "08006"
-TRANSACTION_RESOLUTION_UNKNOWN = "08007"
-PROTOCOL_VIOLATION = "08P01"
+CONNECTION_EXCEPTION = '08000'
+SQLCLIENT_UNABLE_TO_ESTABLISH_SQLCONNECTION = '08001'
+CONNECTION_DOES_NOT_EXIST = '08003'
+SQLSERVER_REJECTED_ESTABLISHMENT_OF_SQLCONNECTION = '08004'
+CONNECTION_FAILURE = '08006'
+TRANSACTION_RESOLUTION_UNKNOWN = '08007'
+PROTOCOL_VIOLATION = '08P01'
# Class 09 - Triggered Action Exception
-TRIGGERED_ACTION_EXCEPTION = "09000"
+TRIGGERED_ACTION_EXCEPTION = '09000'
# Class 0A - Feature Not Supported
-FEATURE_NOT_SUPPORTED = "0A000"
+FEATURE_NOT_SUPPORTED = '0A000'
# Class 0B - Invalid Transaction Initiation
-INVALID_TRANSACTION_INITIATION = "0B000"
+INVALID_TRANSACTION_INITIATION = '0B000'
# Class 0F - Locator Exception
-LOCATOR_EXCEPTION = "0F000"
-INVALID_LOCATOR_SPECIFICATION = "0F001"
+LOCATOR_EXCEPTION = '0F000'
+INVALID_LOCATOR_SPECIFICATION = '0F001'
# Class 0L - Invalid Grantor
-INVALID_GRANTOR = "0L000"
-INVALID_GRANT_OPERATION = "0LP01"
+INVALID_GRANTOR = '0L000'
+INVALID_GRANT_OPERATION = '0LP01'
# Class 0P - Invalid Role Specification
-INVALID_ROLE_SPECIFICATION = "0P000"
+INVALID_ROLE_SPECIFICATION = '0P000'
# Class 0Z - Diagnostics Exception
-DIAGNOSTICS_EXCEPTION = "0Z000"
-STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = "0Z002"
+DIAGNOSTICS_EXCEPTION = '0Z000'
+STACKED_DIAGNOSTICS_ACCESSED_WITHOUT_ACTIVE_HANDLER = '0Z002'
# Class 20 - Case Not Found
-CASE_NOT_FOUND = "20000"
+CASE_NOT_FOUND = '20000'
# Class 21 - Cardinality Violation
-CARDINALITY_VIOLATION = "21000"
+CARDINALITY_VIOLATION = '21000'
# Class 22 - Data Exception
-DATA_EXCEPTION = "22000"
-STRING_DATA_RIGHT_TRUNCATION = "22001"
-NULL_VALUE_NO_INDICATOR_PARAMETER = "22002"
-NUMERIC_VALUE_OUT_OF_RANGE = "22003"
-NULL_VALUE_NOT_ALLOWED = "22004"
-ERROR_IN_ASSIGNMENT = "22005"
-INVALID_DATETIME_FORMAT = "22007"
-DATETIME_FIELD_OVERFLOW = "22008"
-INVALID_TIME_ZONE_DISPLACEMENT_VALUE = "22009"
-ESCAPE_CHARACTER_CONFLICT = "2200B"
-INVALID_USE_OF_ESCAPE_CHARACTER = "2200C"
-INVALID_ESCAPE_OCTET = "2200D"
-ZERO_LENGTH_CHARACTER_STRING = "2200F"
-MOST_SPECIFIC_TYPE_MISMATCH = "2200G"
-SEQUENCE_GENERATOR_LIMIT_EXCEEDED = "2200H"
-NOT_AN_XML_DOCUMENT = "2200L"
-INVALID_XML_DOCUMENT = "2200M"
-INVALID_XML_CONTENT = "2200N"
-INVALID_XML_COMMENT = "2200S"
-INVALID_XML_PROCESSING_INSTRUCTION = "2200T"
-INVALID_INDICATOR_PARAMETER_VALUE = "22010"
-SUBSTRING_ERROR = "22011"
-DIVISION_BY_ZERO = "22012"
-INVALID_PRECEDING_OR_FOLLOWING_SIZE = "22013"
-INVALID_ARGUMENT_FOR_NTILE_FUNCTION = "22014"
-INTERVAL_FIELD_OVERFLOW = "22015"
-INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION = "22016"
-INVALID_CHARACTER_VALUE_FOR_CAST = "22018"
-INVALID_ESCAPE_CHARACTER = "22019"
-INVALID_REGULAR_EXPRESSION = "2201B"
-INVALID_ARGUMENT_FOR_LOGARITHM = "2201E"
-INVALID_ARGUMENT_FOR_POWER_FUNCTION = "2201F"
-INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION = "2201G"
-INVALID_ROW_COUNT_IN_LIMIT_CLAUSE = "2201W"
-INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE = "2201X"
-INVALID_LIMIT_VALUE = "22020"
-CHARACTER_NOT_IN_REPERTOIRE = "22021"
-INDICATOR_OVERFLOW = "22022"
-INVALID_PARAMETER_VALUE = "22023"
-UNTERMINATED_C_STRING = "22024"
-INVALID_ESCAPE_SEQUENCE = "22025"
-STRING_DATA_LENGTH_MISMATCH = "22026"
-TRIM_ERROR = "22027"
-ARRAY_SUBSCRIPT_ERROR = "2202E"
-INVALID_TABLESAMPLE_REPEAT = "2202G"
-INVALID_TABLESAMPLE_ARGUMENT = "2202H"
-DUPLICATE_JSON_OBJECT_KEY_VALUE = "22030"
-INVALID_JSON_TEXT = "22032"
-INVALID_SQL_JSON_SUBSCRIPT = "22033"
-MORE_THAN_ONE_SQL_JSON_ITEM = "22034"
-NO_SQL_JSON_ITEM = "22035"
-NON_NUMERIC_SQL_JSON_ITEM = "22036"
-NON_UNIQUE_KEYS_IN_A_JSON_OBJECT = "22037"
-SINGLETON_SQL_JSON_ITEM_REQUIRED = "22038"
-SQL_JSON_ARRAY_NOT_FOUND = "22039"
-SQL_JSON_MEMBER_NOT_FOUND = "2203A"
-SQL_JSON_NUMBER_NOT_FOUND = "2203B"
-SQL_JSON_OBJECT_NOT_FOUND = "2203C"
-TOO_MANY_JSON_ARRAY_ELEMENTS = "2203D"
-TOO_MANY_JSON_OBJECT_MEMBERS = "2203E"
-SQL_JSON_SCALAR_REQUIRED = "2203F"
-FLOATING_POINT_EXCEPTION = "22P01"
-INVALID_TEXT_REPRESENTATION = "22P02"
-INVALID_BINARY_REPRESENTATION = "22P03"
-BAD_COPY_FILE_FORMAT = "22P04"
-UNTRANSLATABLE_CHARACTER = "22P05"
-NONSTANDARD_USE_OF_ESCAPE_CHARACTER = "22P06"
+DATA_EXCEPTION = '22000'
+STRING_DATA_RIGHT_TRUNCATION = '22001'
+NULL_VALUE_NO_INDICATOR_PARAMETER = '22002'
+NUMERIC_VALUE_OUT_OF_RANGE = '22003'
+NULL_VALUE_NOT_ALLOWED_ = '22004'
+ERROR_IN_ASSIGNMENT = '22005'
+INVALID_DATETIME_FORMAT = '22007'
+DATETIME_FIELD_OVERFLOW = '22008'
+INVALID_TIME_ZONE_DISPLACEMENT_VALUE = '22009'
+ESCAPE_CHARACTER_CONFLICT = '2200B'
+INVALID_USE_OF_ESCAPE_CHARACTER = '2200C'
+INVALID_ESCAPE_OCTET = '2200D'
+ZERO_LENGTH_CHARACTER_STRING = '2200F'
+MOST_SPECIFIC_TYPE_MISMATCH = '2200G'
+SEQUENCE_GENERATOR_LIMIT_EXCEEDED = '2200H'
+NOT_AN_XML_DOCUMENT = '2200L'
+INVALID_XML_DOCUMENT = '2200M'
+INVALID_XML_CONTENT = '2200N'
+INVALID_XML_COMMENT = '2200S'
+INVALID_XML_PROCESSING_INSTRUCTION = '2200T'
+INVALID_INDICATOR_PARAMETER_VALUE = '22010'
+SUBSTRING_ERROR = '22011'
+DIVISION_BY_ZERO = '22012'
+INVALID_PRECEDING_OR_FOLLOWING_SIZE = '22013'
+INVALID_ARGUMENT_FOR_NTILE_FUNCTION = '22014'
+INTERVAL_FIELD_OVERFLOW = '22015'
+INVALID_ARGUMENT_FOR_NTH_VALUE_FUNCTION = '22016'
+INVALID_CHARACTER_VALUE_FOR_CAST = '22018'
+INVALID_ESCAPE_CHARACTER = '22019'
+INVALID_REGULAR_EXPRESSION = '2201B'
+INVALID_ARGUMENT_FOR_LOGARITHM = '2201E'
+INVALID_ARGUMENT_FOR_POWER_FUNCTION = '2201F'
+INVALID_ARGUMENT_FOR_WIDTH_BUCKET_FUNCTION = '2201G'
+INVALID_ROW_COUNT_IN_LIMIT_CLAUSE = '2201W'
+INVALID_ROW_COUNT_IN_RESULT_OFFSET_CLAUSE = '2201X'
+INVALID_LIMIT_VALUE = '22020'
+CHARACTER_NOT_IN_REPERTOIRE = '22021'
+INDICATOR_OVERFLOW = '22022'
+INVALID_PARAMETER_VALUE = '22023'
+UNTERMINATED_C_STRING = '22024'
+INVALID_ESCAPE_SEQUENCE = '22025'
+STRING_DATA_LENGTH_MISMATCH = '22026'
+TRIM_ERROR = '22027'
+ARRAY_SUBSCRIPT_ERROR = '2202E'
+INVALID_TABLESAMPLE_REPEAT = '2202G'
+INVALID_TABLESAMPLE_ARGUMENT = '2202H'
+DUPLICATE_JSON_OBJECT_KEY_VALUE = '22030'
+INVALID_ARGUMENT_FOR_SQL_JSON_DATETIME_FUNCTION = '22031'
+INVALID_JSON_TEXT = '22032'
+INVALID_SQL_JSON_SUBSCRIPT = '22033'
+MORE_THAN_ONE_SQL_JSON_ITEM = '22034'
+NO_SQL_JSON_ITEM = '22035'
+NON_NUMERIC_SQL_JSON_ITEM = '22036'
+NON_UNIQUE_KEYS_IN_A_JSON_OBJECT = '22037'
+SINGLETON_SQL_JSON_ITEM_REQUIRED = '22038'
+SQL_JSON_ARRAY_NOT_FOUND = '22039'
+SQL_JSON_MEMBER_NOT_FOUND = '2203A'
+SQL_JSON_NUMBER_NOT_FOUND = '2203B'
+SQL_JSON_OBJECT_NOT_FOUND = '2203C'
+TOO_MANY_JSON_ARRAY_ELEMENTS = '2203D'
+TOO_MANY_JSON_OBJECT_MEMBERS = '2203E'
+SQL_JSON_SCALAR_REQUIRED = '2203F'
+SQL_JSON_ITEM_CANNOT_BE_CAST_TO_TARGET_TYPE = '2203G'
+FLOATING_POINT_EXCEPTION = '22P01'
+INVALID_TEXT_REPRESENTATION = '22P02'
+INVALID_BINARY_REPRESENTATION = '22P03'
+BAD_COPY_FILE_FORMAT = '22P04'
+UNTRANSLATABLE_CHARACTER = '22P05'
+NONSTANDARD_USE_OF_ESCAPE_CHARACTER = '22P06'
# Class 23 - Integrity Constraint Violation
-INTEGRITY_CONSTRAINT_VIOLATION = "23000"
-RESTRICT_VIOLATION = "23001"
-NOT_NULL_VIOLATION = "23502"
-FOREIGN_KEY_VIOLATION = "23503"
-UNIQUE_VIOLATION = "23505"
-CHECK_VIOLATION = "23514"
-EXCLUSION_VIOLATION = "23P01"
+INTEGRITY_CONSTRAINT_VIOLATION = '23000'
+RESTRICT_VIOLATION = '23001'
+NOT_NULL_VIOLATION = '23502'
+FOREIGN_KEY_VIOLATION = '23503'
+UNIQUE_VIOLATION = '23505'
+CHECK_VIOLATION = '23514'
+EXCLUSION_VIOLATION = '23P01'
# Class 24 - Invalid Cursor State
-INVALID_CURSOR_STATE = "24000"
+INVALID_CURSOR_STATE = '24000'
# Class 25 - Invalid Transaction State
-INVALID_TRANSACTION_STATE = "25000"
-ACTIVE_SQL_TRANSACTION = "25001"
-BRANCH_TRANSACTION_ALREADY_ACTIVE = "25002"
-INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION = "25003"
-INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION = "25004"
-NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION = "25005"
-READ_ONLY_SQL_TRANSACTION = "25006"
-SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED = "25007"
-HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = "25008"
-NO_ACTIVE_SQL_TRANSACTION = "25P01"
-IN_FAILED_SQL_TRANSACTION = "25P02"
-IDLE_IN_TRANSACTION_SESSION_TIMEOUT = "25P03"
+INVALID_TRANSACTION_STATE = '25000'
+ACTIVE_SQL_TRANSACTION = '25001'
+BRANCH_TRANSACTION_ALREADY_ACTIVE = '25002'
+INAPPROPRIATE_ACCESS_MODE_FOR_BRANCH_TRANSACTION = '25003'
+INAPPROPRIATE_ISOLATION_LEVEL_FOR_BRANCH_TRANSACTION = '25004'
+NO_ACTIVE_SQL_TRANSACTION_FOR_BRANCH_TRANSACTION = '25005'
+READ_ONLY_SQL_TRANSACTION = '25006'
+SCHEMA_AND_DATA_STATEMENT_MIXING_NOT_SUPPORTED = '25007'
+HELD_CURSOR_REQUIRES_SAME_ISOLATION_LEVEL = '25008'
+NO_ACTIVE_SQL_TRANSACTION = '25P01'
+IN_FAILED_SQL_TRANSACTION = '25P02'
+IDLE_IN_TRANSACTION_SESSION_TIMEOUT = '25P03'
# Class 26 - Invalid SQL Statement Name
-INVALID_SQL_STATEMENT_NAME = "26000"
+INVALID_SQL_STATEMENT_NAME = '26000'
# Class 27 - Triggered Data Change Violation
-TRIGGERED_DATA_CHANGE_VIOLATION = "27000"
+TRIGGERED_DATA_CHANGE_VIOLATION = '27000'
# Class 28 - Invalid Authorization Specification
-INVALID_AUTHORIZATION_SPECIFICATION = "28000"
-INVALID_PASS_WORD = "28P01"
+INVALID_AUTHORIZATION_SPECIFICATION = '28000'
+INVALID_PASSWORD = '28P01'
# Class 2B - Dependent Privilege Descriptors Still Exist
-DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = "2B000"
-DEPENDENT_OBJECTS_STILL_EXIST = "2BP01"
+DEPENDENT_PRIVILEGE_DESCRIPTORS_STILL_EXIST = '2B000'
+DEPENDENT_OBJECTS_STILL_EXIST = '2BP01'
# Class 2D - Invalid Transaction Termination
-INVALID_TRANSACTION_TERMINATION = "2D000"
+INVALID_TRANSACTION_TERMINATION = '2D000'
# Class 2F - SQL Routine Exception
-SQL_ROUTINE_EXCEPTION = "2F000"
-MODIFYING_SQL_DATA_NOT_PERMITTED = "2F002"
-PROHIBITED_SQL_STATEMENT_ATTEMPTED = "2F003"
-READING_SQL_DATA_NOT_PERMITTED = "2F004"
-FUNCTION_EXECUTED_NO_RETURN_STATEMENT = "2F005"
+SQL_ROUTINE_EXCEPTION = '2F000'
+MODIFYING_SQL_DATA_NOT_PERMITTED_ = '2F002'
+PROHIBITED_SQL_STATEMENT_ATTEMPTED_ = '2F003'
+READING_SQL_DATA_NOT_PERMITTED_ = '2F004'
+FUNCTION_EXECUTED_NO_RETURN_STATEMENT = '2F005'
# Class 34 - Invalid Cursor Name
-INVALID_CURSOR_NAME = "34000"
+INVALID_CURSOR_NAME = '34000'
# Class 38 - External Routine Exception
-EXTERNAL_ROUTINE_EXCEPTION = "38000"
-CONTAINING_SQL_NOT_PERMITTED = "38001"
-MODIFYING_SQL_DATA_NOT_PERMITTED = "38002"
-PROHIBITED_SQL_STATEMENT_ATTEMPTED = "38003"
-READING_SQL_DATA_NOT_PERMITTED = "38004"
+EXTERNAL_ROUTINE_EXCEPTION = '38000'
+CONTAINING_SQL_NOT_PERMITTED = '38001'
+MODIFYING_SQL_DATA_NOT_PERMITTED = '38002'
+PROHIBITED_SQL_STATEMENT_ATTEMPTED = '38003'
+READING_SQL_DATA_NOT_PERMITTED = '38004'
# Class 39 - External Routine Invocation Exception
-EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = "39000"
-INVALID_SQLSTATE_RETURNED = "39001"
-NULL_VALUE_NOT_ALLOWED = "39004"
-TRIGGER_PROTOCOL_VIOLATED = "39P01"
-SRF_PROTOCOL_VIOLATED = "39P02"
-EVENT_TRIGGER_PROTOCOL_VIOLATED = "39P03"
+EXTERNAL_ROUTINE_INVOCATION_EXCEPTION = '39000'
+INVALID_SQLSTATE_RETURNED = '39001'
+NULL_VALUE_NOT_ALLOWED = '39004'
+TRIGGER_PROTOCOL_VIOLATED = '39P01'
+SRF_PROTOCOL_VIOLATED = '39P02'
+EVENT_TRIGGER_PROTOCOL_VIOLATED = '39P03'
# Class 3B - Savepoint Exception
-SAVEPOINT_EXCEPTION = "3B000"
-INVALID_SAVEPOINT_SPECIFICATION = "3B001"
+SAVEPOINT_EXCEPTION = '3B000'
+INVALID_SAVEPOINT_SPECIFICATION = '3B001'
# Class 3D - Invalid Catalog Name
-INVALID_CATALOG_NAME = "3D000"
+INVALID_CATALOG_NAME = '3D000'
# Class 3F - Invalid Schema Name
-INVALID_SCHEMA_NAME = "3F000"
+INVALID_SCHEMA_NAME = '3F000'
# Class 40 - Transaction Rollback
-TRANSACTION_ROLLBACK = "40000"
-SERIALIZATION_FAILURE = "40001"
-TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION = "40002"
-STATEMENT_COMPLETION_UNKNOWN = "40003"
-DEADLOCK_DETECTED = "40P01"
+TRANSACTION_ROLLBACK = '40000'
+SERIALIZATION_FAILURE = '40001'
+TRANSACTION_INTEGRITY_CONSTRAINT_VIOLATION = '40002'
+STATEMENT_COMPLETION_UNKNOWN = '40003'
+DEADLOCK_DETECTED = '40P01'
# Class 42 - Syntax Error or Access Rule Violation
-SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = "42000"
-INSUFFICIENT_PRIVILEGE = "42501"
-SYNTAX_ERROR = "42601"
-INVALID_NAME = "42602"
-INVALID_COLUMN_DEFINITION = "42611"
-NAME_TOO_LONG = "42622"
-DUPLICATE_COLUMN = "42701"
-AMBIGUOUS_COLUMN = "42702"
-UNDEFINED_COLUMN = "42703"
-UNDEFINED_OBJECT = "42704"
-DUPLICATE_OBJECT = "42710"
-DUPLICATE_ALIAS = "42712"
-DUPLICATE_FUNCTION = "42723"
-AMBIGUOUS_FUNCTION = "42725"
-GROUPING_ERROR = "42803"
-DATATYPE_MISMATCH = "42804"
-WRONG_OBJECT_TYPE = "42809"
-INVALID_FOREIGN_K_EY = "42830"
-CANNOT_COERCE = "42846"
-UNDEFINED_FUNCTION = "42883"
-GENERATED_ALWAYS = "428C9"
-RESERVED_NAME = "42939"
-UNDEFINED_TABLE = "42P01"
-UNDEFINED_PARAMETER = "42P02"
-DUPLICATE_CURSOR = "42P03"
-DUPLICATE_DATABASE = "42P04"
-DUPLICATE_PREPARED_STATEMENT = "42P05"
-DUPLICATE_SCHEMA = "42P06"
-DUPLICATE_TABLE = "42P07"
-AMBIGUOUS_PARAMETER = "42P08"
-AMBIGUOUS_ALIAS = "42P09"
-INVALID_COLUMN_REFERENCE = "42P10"
-INVALID_CURSOR_DEFINITION = "42P11"
-INVALID_DATABASE_DEFINITION = "42P12"
-INVALID_FUNCTION_DEFINITION = "42P13"
-INVALID_PREPARED_STATEMENT_DEFINITION = "42P14"
-INVALID_SCHEMA_DEFINITION = "42P15"
-INVALID_TABLE_DEFINITION = "42P16"
-INVALID_OBJECT_DEFINITION = "42P17"
-INDETERMINATE_DATATYPE = "42P18"
-INVALID_RECURSION = "42P19"
-WINDOWING_ERROR = "42P20"
-COLLATION_MISMATCH = "42P21"
-INDETERMINATE_COLLATION = "42P22"
+SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION = '42000'
+INSUFFICIENT_PRIVILEGE = '42501'
+SYNTAX_ERROR = '42601'
+INVALID_NAME = '42602'
+INVALID_COLUMN_DEFINITION = '42611'
+NAME_TOO_LONG = '42622'
+DUPLICATE_COLUMN = '42701'
+AMBIGUOUS_COLUMN = '42702'
+UNDEFINED_COLUMN = '42703'
+UNDEFINED_OBJECT = '42704'
+DUPLICATE_OBJECT = '42710'
+DUPLICATE_ALIAS = '42712'
+DUPLICATE_FUNCTION = '42723'
+AMBIGUOUS_FUNCTION = '42725'
+GROUPING_ERROR = '42803'
+DATATYPE_MISMATCH = '42804'
+WRONG_OBJECT_TYPE = '42809'
+INVALID_FOREIGN_KEY = '42830'
+CANNOT_COERCE = '42846'
+UNDEFINED_FUNCTION = '42883'
+GENERATED_ALWAYS = '428C9'
+RESERVED_NAME = '42939'
+UNDEFINED_TABLE = '42P01'
+UNDEFINED_PARAMETER = '42P02'
+DUPLICATE_CURSOR = '42P03'
+DUPLICATE_DATABASE = '42P04'
+DUPLICATE_PREPARED_STATEMENT = '42P05'
+DUPLICATE_SCHEMA = '42P06'
+DUPLICATE_TABLE = '42P07'
+AMBIGUOUS_PARAMETER = '42P08'
+AMBIGUOUS_ALIAS = '42P09'
+INVALID_COLUMN_REFERENCE = '42P10'
+INVALID_CURSOR_DEFINITION = '42P11'
+INVALID_DATABASE_DEFINITION = '42P12'
+INVALID_FUNCTION_DEFINITION = '42P13'
+INVALID_PREPARED_STATEMENT_DEFINITION = '42P14'
+INVALID_SCHEMA_DEFINITION = '42P15'
+INVALID_TABLE_DEFINITION = '42P16'
+INVALID_OBJECT_DEFINITION = '42P17'
+INDETERMINATE_DATATYPE = '42P18'
+INVALID_RECURSION = '42P19'
+WINDOWING_ERROR = '42P20'
+COLLATION_MISMATCH = '42P21'
+INDETERMINATE_COLLATION = '42P22'
# Class 44 - WITH CHECK OPTION Violation
-WITH_CHECK_OPTION_VIOLATION = "44000"
+WITH_CHECK_OPTION_VIOLATION = '44000'
# Class 53 - Insufficient Resources
-INSUFFICIENT_RESOURCES = "53000"
-DISK_FULL = "53100"
-OUT_OF_MEMORY = "53200"
-TOO_MANY_CONNECTIONS = "53300"
-CONFIGURATION_LIMIT_EXCEEDED = "53400"
+INSUFFICIENT_RESOURCES = '53000'
+DISK_FULL = '53100'
+OUT_OF_MEMORY = '53200'
+TOO_MANY_CONNECTIONS = '53300'
+CONFIGURATION_LIMIT_EXCEEDED = '53400'
# Class 54 - Program Limit Exceeded
-PROGRAM_LIMIT_EXCEEDED = "54000"
-STATEMENT_TOO_COMPLEX = "54001"
-TOO_MANY_COLUMNS = "54011"
-TOO_MANY_ARGUMENTS = "54023"
+PROGRAM_LIMIT_EXCEEDED = '54000'
+STATEMENT_TOO_COMPLEX = '54001'
+TOO_MANY_COLUMNS = '54011'
+TOO_MANY_ARGUMENTS = '54023'
# Class 55 - Object Not In Prerequisite State
-OBJECT_NOT_IN_PREREQUISITE_STATE = "55000"
-OBJECT_IN_USE = "55006"
-CANT_CHANGE_RUNTIME_PARAM = "55P02"
-LOCK_NOT_AVAILABLE = "55P03"
-UNSAFE_NEW_ENUM_VALUE_USAGE = "55P04"
+OBJECT_NOT_IN_PREREQUISITE_STATE = '55000'
+OBJECT_IN_USE = '55006'
+CANT_CHANGE_RUNTIME_PARAM = '55P02'
+LOCK_NOT_AVAILABLE = '55P03'
+UNSAFE_NEW_ENUM_VALUE_USAGE = '55P04'
# Class 57 - Operator Intervention
-OPERATOR_INTERVENTION = "57000"
-QUERY_CANCELED = "57014"
-ADMIN_SHUTDOWN = "57P01"
-CRASH_SHUTDOWN = "57P02"
-CANNOT_CONNECT_NOW = "57P03"
-DATABASE_DROPPED = "57P04"
+OPERATOR_INTERVENTION = '57000'
+QUERY_CANCELED = '57014'
+ADMIN_SHUTDOWN = '57P01'
+CRASH_SHUTDOWN = '57P02'
+CANNOT_CONNECT_NOW = '57P03'
+DATABASE_DROPPED = '57P04'
+IDLE_SESSION_TIMEOUT = '57P05'
# Class 58 - System Error (errors external to PostgreSQL itself)
-SYSTEM_ERROR = "58000"
-IO_ERROR = "58030"
-UNDEFINED_FILE = "58P01"
-DUPLICATE_FILE = "58P02"
+SYSTEM_ERROR = '58000'
+IO_ERROR = '58030'
+UNDEFINED_FILE = '58P01'
+DUPLICATE_FILE = '58P02'
# Class 72 - Snapshot Failure
-SNAPSHOT_TOO_OLD = "72000"
+SNAPSHOT_TOO_OLD = '72000'
# Class F0 - Configuration File Error
-CONFIG_FILE_ERROR = "F0000"
-LOCK_FILE_EXISTS = "F0001"
+CONFIG_FILE_ERROR = 'F0000'
+LOCK_FILE_EXISTS = 'F0001'
# Class HV - Foreign Data Wrapper Error (SQL/MED)
-FDW_ERROR = "HV000"
-FDW_OUT_OF_MEMORY = "HV001"
-FDW_DYNAMIC_PARAMETER_VALUE_NEEDED = "HV002"
-FDW_INVALID_DATA_TYPE = "HV004"
-FDW_COLUMN_NAME_NOT_FOUND = "HV005"
-FDW_INVALID_DATA_TYPE_DESCRIPTORS = "HV006"
-FDW_INVALID_COLUMN_NAME = "HV007"
-FDW_INVALID_COLUMN_NUMBER = "HV008"
-FDW_INVALID_USE_OF_NULL_POINTER = "HV009"
-FDW_INVALID_STRING_FORMAT = "HV00A"
-FDW_INVALID_HANDLE = "HV00B"
-FDW_INVALID_OPTION_INDEX = "HV00C"
-FDW_INVALID_OPTION_NAME = "HV00D"
-FDW_OPTION_NAME_NOT_FOUND = "HV00J"
-FDW_REPLY_HANDLE = "HV00K"
-FDW_UNABLE_TO_CREATE_EXECUTION = "HV00L"
-FDW_UNABLE_TO_CREATE_REPLY = "HV00M"
-FDW_UNABLE_TO_ESTABLISH_CONNECTION = "HV00N"
-FDW_NO_SCHEMAS = "HV00P"
-FDW_SCHEMA_NOT_FOUND = "HV00Q"
-FDW_TABLE_NOT_FOUND = "HV00R"
-FDW_FUNCTION_SEQUENCE_ERROR = "HV010"
-FDW_TOO_MANY_HANDLES = "HV014"
-FDW_INCONSISTENT_DESCRIPTOR_INFORMATION = "HV021"
-FDW_INVALID_ATTRIBUTE_VALUE = "HV024"
-FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH = "HV090"
-FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER = "HV091"
+FDW_ERROR = 'HV000'
+FDW_OUT_OF_MEMORY = 'HV001'
+FDW_DYNAMIC_PARAMETER_VALUE_NEEDED = 'HV002'
+FDW_INVALID_DATA_TYPE = 'HV004'
+FDW_COLUMN_NAME_NOT_FOUND = 'HV005'
+FDW_INVALID_DATA_TYPE_DESCRIPTORS = 'HV006'
+FDW_INVALID_COLUMN_NAME = 'HV007'
+FDW_INVALID_COLUMN_NUMBER = 'HV008'
+FDW_INVALID_USE_OF_NULL_POINTER = 'HV009'
+FDW_INVALID_STRING_FORMAT = 'HV00A'
+FDW_INVALID_HANDLE = 'HV00B'
+FDW_INVALID_OPTION_INDEX = 'HV00C'
+FDW_INVALID_OPTION_NAME = 'HV00D'
+FDW_OPTION_NAME_NOT_FOUND = 'HV00J'
+FDW_REPLY_HANDLE = 'HV00K'
+FDW_UNABLE_TO_CREATE_EXECUTION = 'HV00L'
+FDW_UNABLE_TO_CREATE_REPLY = 'HV00M'
+FDW_UNABLE_TO_ESTABLISH_CONNECTION = 'HV00N'
+FDW_NO_SCHEMAS = 'HV00P'
+FDW_SCHEMA_NOT_FOUND = 'HV00Q'
+FDW_TABLE_NOT_FOUND = 'HV00R'
+FDW_FUNCTION_SEQUENCE_ERROR = 'HV010'
+FDW_TOO_MANY_HANDLES = 'HV014'
+FDW_INCONSISTENT_DESCRIPTOR_INFORMATION = 'HV021'
+FDW_INVALID_ATTRIBUTE_VALUE = 'HV024'
+FDW_INVALID_STRING_LENGTH_OR_BUFFER_LENGTH = 'HV090'
+FDW_INVALID_DESCRIPTOR_FIELD_IDENTIFIER = 'HV091'
# Class P0 - PL/pgSQL Error
-PLPGSQL_ERROR = "P0000"
-RAISE_EXCEPTION = "P0001"
-NO_DATA_FOUND = "P0002"
-TOO_MANY_ROWS = "P0003"
-ASSERT_FAILURE = "P0004"
+PLPGSQL_ERROR = 'P0000'
+RAISE_EXCEPTION = 'P0001'
+NO_DATA_FOUND = 'P0002'
+TOO_MANY_ROWS = 'P0003'
+ASSERT_FAILURE = 'P0004'
# Class XX - Internal Error
-INTERNAL_ERROR = "XX000"
-DATA_CORRUPTED = "XX001"
-INDEX_CORRUPTED = "XX002"
+INTERNAL_ERROR = 'XX000'
+DATA_CORRUPTED = 'XX001'
+INDEX_CORRUPTED = 'XX002'
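(A short sketch of the lookup() behaviour changed above: duplicate SQLSTATE values now keep distinct constant names via a trailing underscore, which lookup() strips again when mapping a code back to its symbolic name.)

    from psycopg2 import errorcodes

    print(errorcodes.lookup("28P01"))  # INVALID_PASSWORD
    print(errorcodes.lookup("01004"))  # STRING_DATA_RIGHT_TRUNCATION
    print(errorcodes.lookup("22001"))  # STRING_DATA_RIGHT_TRUNCATION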
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errors.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errors.py
index 98983fff..e4e47f5b 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errors.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/errors.py
@@ -4,7 +4,7 @@
# psycopg/errors.py - SQLSTATE and DB-API exceptions
#
# Copyright (C) 2018-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extensions.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extensions.py
index cdea76de..b938d0ce 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extensions.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extensions.py
@@ -6,14 +6,14 @@
- `cursor` -- the new-type inheritable cursor class
- `lobject` -- the new-type inheritable large object class
- `adapt()` -- exposes the PEP-246_ compatible adapting mechanism used
-by psycopg to adapt Python types to PostgreSQL ones
+ by psycopg to adapt Python types to PostgreSQL ones
.. _PEP-246: https://www.python.org/dev/peps/pep-0246/
"""
# psycopg/extensions.py - DBAPI-2.0 extensions specific to psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -42,14 +42,6 @@
ROWIDARRAY, STRINGARRAY, TIME, TIMEARRAY, UNICODE, UNICODEARRAY,
AsIs, Binary, Boolean, Float, Int, QuotedString, )
-try:
- from psycopg2._psycopg import ( # noqa
- MXDATE, MXDATETIME, MXDATETIMETZ, MXINTERVAL, MXTIME, MXDATEARRAY,
- MXDATETIMEARRAY, MXDATETIMETZARRAY, MXINTERVALARRAY, MXTIMEARRAY,
- DateFromMx, TimeFromMx, TimestampFromMx, IntervalFromMx, )
-except ImportError:
- pass
-
from psycopg2._psycopg import ( # noqa
PYDATE, PYDATETIME, PYDATETIMETZ, PYINTERVAL, PYTIME, PYDATEARRAY,
PYDATETIMEARRAY, PYDATETIMETZARRAY, PYINTERVALARRAY, PYTIMEARRAY,
@@ -106,7 +98,7 @@ def register_adapter(typ, callable):
# The SQL_IN class is the official adapter for tuples starting from 2.0.6.
-class SQL_IN(object):
+class SQL_IN:
"""Adapt any iterable to an SQL quotable object."""
def __init__(self, seq):
self._seq = seq
@@ -130,7 +122,7 @@ def __str__(self):
return str(self.getquoted())
-class NoneAdapter(object):
+class NoneAdapter:
"""Adapt None to NULL.
This adapter is not used normally as a fast path in mogrify uses NULL,
@@ -168,7 +160,7 @@ def make_dsn(dsn=None, **kwargs):
tmp.update(kwargs)
kwargs = tmp
- dsn = " ".join(["%s=%s" % (k, _param_escape(str(v)))
+ dsn = " ".join(["{}={}".format(k, _param_escape(str(v)))
for (k, v) in kwargs.items()])
# verify that the returned dsn is valid
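(A minimal make_dsn() sketch; all values are placeholders. Values containing spaces or quotes are escaped automatically by _param_escape.)

    from psycopg2.extensions import make_dsn

    dsn = make_dsn("dbname=postcodes", user="etl", password="s3cret word")
    print(dsn)  # e.g. dbname=postcodes user=etl password='s3cret word'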
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extras.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extras.py
index a24f7496..36e8ef9a 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extras.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/extras.py
@@ -6,7 +6,7 @@
# psycopg/extras.py - miscellaneous extra goodies for psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -38,7 +38,7 @@
from .extensions import cursor as _cursor
from .extensions import connection as _connection
from .extensions import adapt as _A, quote_ident
-from .compat import PY2, PY3, lru_cache
+from functools import lru_cache
from psycopg2._psycopg import ( # noqa
REPLICATION_PHYSICAL, REPLICATION_LOGICAL,
@@ -72,47 +72,47 @@ def __init__(self, *args, **kwargs):
else:
raise NotImplementedError(
"DictCursorBase can't be instantiated without a row factory.")
- super(DictCursorBase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._query_executed = False
self._prefetch = False
self.row_factory = row_factory
def fetchone(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchone()
+ res = super().fetchone()
return res
def fetchmany(self, size=None):
if self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchmany(size)
+ res = super().fetchmany(size)
return res
def fetchall(self):
if self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).fetchall()
+ res = super().fetchall()
return res
def __iter__(self):
try:
if self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
if self._query_executed:
self._build_index()
if not self._prefetch:
- res = super(DictCursorBase, self).__iter__()
+ res = super().__iter__()
first = next(res)
yield first
@@ -126,26 +126,29 @@ class DictConnection(_connection):
"""A connection that uses `DictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or DictCursor)
- return super(DictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class DictCursor(DictCursorBase):
- """A cursor that keeps a list of column name -> index mappings."""
+ """A cursor that keeps a list of column name -> index mappings__.
+
+ .. __: https://docs.python.org/glossary.html#term-mapping
+ """
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = DictRow
- super(DictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self._prefetch = True
def execute(self, query, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.index = OrderedDict()
self._query_executed = True
- return super(DictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -166,22 +169,22 @@ def __init__(self, cursor):
def __getitem__(self, x):
if not isinstance(x, (int, slice)):
x = self._index[x]
- return super(DictRow, self).__getitem__(x)
+ return super().__getitem__(x)
def __setitem__(self, x, v):
if not isinstance(x, (int, slice)):
x = self._index[x]
- super(DictRow, self).__setitem__(x, v)
+ super().__setitem__(x, v)
def items(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return ((n, g(self._index[n])) for n in self._index)
def keys(self):
return iter(self._index)
def values(self):
- g = super(DictRow, self).__getitem__
+ g = super().__getitem__
return (g(self._index[n]) for n in self._index)
def get(self, x, default=None):
@@ -198,7 +201,7 @@ def __contains__(self, x):
def __reduce__(self):
# this is apparently useless, but it fixes #1073
- return super(DictRow, self).__reduce__()
+ return super().__reduce__()
def __getstate__(self):
return self[:], self._index.copy()
@@ -207,27 +210,12 @@ def __setstate__(self, data):
self[:] = data[0]
self._index = data[1]
- if PY2:
- iterkeys = keys
- itervalues = values
- iteritems = items
- has_key = __contains__
-
- def keys(self):
- return list(self.iterkeys())
-
- def values(self):
- return tuple(self.itervalues())
-
- def items(self):
- return list(self.iteritems())
-
class RealDictConnection(_connection):
"""A connection that uses `RealDictCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or RealDictCursor)
- return super(RealDictConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class RealDictCursor(DictCursorBase):
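(A usage sketch for the DictCursor/DictRow family updated above; the DSN is a placeholder, not part of this patch.)

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")  # placeholder DSN
    curs = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    curs.execute("SELECT 1 AS id, 'AB1 2CD' AS postcode")
    row = curs.fetchone()
    print(row["postcode"], row[1])  # same cell, by name or by index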
@@ -240,17 +228,17 @@ class RealDictCursor(DictCursorBase):
"""
def __init__(self, *args, **kwargs):
kwargs['row_factory'] = RealDictRow
- super(RealDictCursor, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def execute(self, query, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def callproc(self, procname, vars=None):
self.column_mapping = []
self._query_executed = True
- return super(RealDictCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def _build_index(self):
if self._query_executed and self.description:
@@ -268,7 +256,7 @@ def __init__(self, *args, **kwargs):
else:
cursor = None
- super(RealDictRow, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if cursor is not None:
# Required for named cursors
@@ -284,20 +272,20 @@ def __setitem__(self, key, value):
if RealDictRow in self:
# We are in the row building phase
mapping = self[RealDictRow]
- super(RealDictRow, self).__setitem__(mapping[key], value)
+ super().__setitem__(mapping[key], value)
if key == len(mapping) - 1:
# Row building finished
del self[RealDictRow]
return
- super(RealDictRow, self).__setitem__(key, value)
+ super().__setitem__(key, value)
class NamedTupleConnection(_connection):
"""A connection that uses `NamedTupleCursor` automatically."""
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory', self.cursor_factory or NamedTupleCursor)
- return super(NamedTupleConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class NamedTupleCursor(_cursor):
@@ -321,18 +309,18 @@ class NamedTupleCursor(_cursor):
def execute(self, query, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).execute(query, vars)
+ return super().execute(query, vars)
def executemany(self, query, vars):
self.Record = None
- return super(NamedTupleCursor, self).executemany(query, vars)
+ return super().executemany(query, vars)
def callproc(self, procname, vars=None):
self.Record = None
- return super(NamedTupleCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
def fetchone(self):
- t = super(NamedTupleCursor, self).fetchone()
+ t = super().fetchone()
if t is not None:
nt = self.Record
if nt is None:
@@ -340,14 +328,14 @@ def fetchone(self):
return nt._make(t)
def fetchmany(self, size=None):
- ts = super(NamedTupleCursor, self).fetchmany(size)
+ ts = super().fetchmany(size)
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
return list(map(nt._make, ts))
def fetchall(self):
- ts = super(NamedTupleCursor, self).fetchall()
+ ts = super().fetchall()
nt = self.Record
if nt is None:
nt = self.Record = self._make_nt()
@@ -355,7 +343,7 @@ def fetchall(self):
def __iter__(self):
try:
- it = super(NamedTupleCursor, self).__iter__()
+ it = super().__iter__()
t = next(it)
nt = self.Record
@@ -369,10 +357,6 @@ def __iter__(self):
except StopIteration:
return
- # ascii except alnum and underscore
- _re_clean = _re.compile(
- '[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
-
def _make_nt(self):
key = tuple(d[0] for d in self.description) if self.description else ()
return self._cached_make_nt(key)
@@ -381,7 +365,7 @@ def _make_nt(self):
def _do_make_nt(cls, key):
fields = []
for s in key:
- s = cls._re_clean.sub('_', s)
+ s = _re_clean.sub('_', s)
# Python identifier cannot start with numbers, namedtuple fields
# cannot start with underscore. So...
if s[0] == '_' or '0' <= s[0] <= '9':
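(A NamedTupleCursor sketch matching the hunks above; the DSN is a placeholder. Column names become fields on a generated namedtuple, sanitised by _re_clean.)

    import psycopg2
    import psycopg2.extras

    conn = psycopg2.connect("dbname=test")  # placeholder DSN
    curs = conn.cursor(cursor_factory=psycopg2.extras.NamedTupleCursor)
    curs.execute("SELECT 1 AS id, 'AB1 2CD' AS postcode")
    rec = curs.fetchone()
    print(rec.id, rec.postcode)  # attribute access on the generated namedtuple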
@@ -433,7 +417,7 @@ def filter(self, msg, curs):
def _logtofile(self, msg, curs):
msg = self.filter(msg, curs)
if msg:
- if PY3 and isinstance(msg, bytes):
+ if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
self._logobj.write(msg + _os.linesep)
@@ -450,7 +434,7 @@ def _check(self):
def cursor(self, *args, **kwargs):
self._check()
kwargs.setdefault('cursor_factory', self.cursor_factory or LoggingCursor)
- return super(LoggingConnection, self).cursor(*args, **kwargs)
+ return super().cursor(*args, **kwargs)
class LoggingCursor(_cursor):
@@ -458,13 +442,13 @@ class LoggingCursor(_cursor):
def execute(self, query, vars=None):
try:
- return super(LoggingCursor, self).execute(query, vars)
+ return super().execute(query, vars)
finally:
self.connection.log(self.query, self)
def callproc(self, procname, vars=None):
try:
- return super(LoggingCursor, self).callproc(procname, vars)
+ return super().callproc(procname, vars)
finally:
self.connection.log(self.query, self)
@@ -487,9 +471,9 @@ def initialize(self, logobj, mintime=0):
def filter(self, msg, curs):
t = (_time.time() - curs.timestamp) * 1000
if t > self._mintime:
- if PY3 and isinstance(msg, bytes):
+ if isinstance(msg, bytes):
msg = msg.decode(_ext.encodings[self.encoding], 'replace')
- return msg + _os.linesep + " (execution time: %d ms)" % t
+ return f"{msg}{_os.linesep} (execution time: {t} ms)"
def cursor(self, *args, **kwargs):
kwargs.setdefault('cursor_factory',
@@ -513,14 +497,14 @@ class LogicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_LOGICAL
- super(LogicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class PhysicalReplicationConnection(_replicationConnection):
def __init__(self, *args, **kwargs):
kwargs['replication_type'] = REPLICATION_PHYSICAL
- super(PhysicalReplicationConnection, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
class StopReplication(Exception):
@@ -541,7 +525,7 @@ class ReplicationCursor(_replicationCursor):
def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None):
"""Create streaming replication slot."""
- command = "CREATE_REPLICATION_SLOT %s " % quote_ident(slot_name, self)
+ command = f"CREATE_REPLICATION_SLOT {quote_ident(slot_name, self)} "
if slot_type is None:
slot_type = self.connection.replication_type
@@ -552,7 +536,7 @@ def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None)
"output plugin name is required to create "
"logical replication slot")
- command += "LOGICAL %s" % quote_ident(output_plugin, self)
+ command += f"LOGICAL {quote_ident(output_plugin, self)}"
elif slot_type == REPLICATION_PHYSICAL:
if output_plugin is not None:
@@ -564,14 +548,14 @@ def create_replication_slot(self, slot_name, slot_type=None, output_plugin=None)
else:
raise psycopg2.ProgrammingError(
- "unrecognized replication type: %s" % repr(slot_type))
+ f"unrecognized replication type: {repr(slot_type)}")
self.execute(command)
def drop_replication_slot(self, slot_name):
"""Drop streaming replication slot."""
- command = "DROP_REPLICATION_SLOT %s" % quote_ident(slot_name, self)
+ command = f"DROP_REPLICATION_SLOT {quote_ident(slot_name, self)}"
self.execute(command)
def start_replication(
@@ -586,7 +570,7 @@ def start_replication(
if slot_type == REPLICATION_LOGICAL:
if slot_name:
- command += "SLOT %s " % quote_ident(slot_name, self)
+ command += f"SLOT {quote_ident(slot_name, self)} "
else:
raise psycopg2.ProgrammingError(
"slot name is required for logical replication")
@@ -595,19 +579,18 @@ def start_replication(
elif slot_type == REPLICATION_PHYSICAL:
if slot_name:
- command += "SLOT %s " % quote_ident(slot_name, self)
+ command += f"SLOT {quote_ident(slot_name, self)} "
# don't add "PHYSICAL", before 9.4 it was just START_REPLICATION XXX/XXX
else:
raise psycopg2.ProgrammingError(
- "unrecognized replication type: %s" % repr(slot_type))
+ f"unrecognized replication type: {repr(slot_type)}")
if type(start_lsn) is str:
lsn = start_lsn.split('/')
- lsn = "%X/%08X" % (int(lsn[0], 16), int(lsn[1], 16))
+ lsn = f"{int(lsn[0], 16):X}/{int(lsn[1], 16):08X}"
else:
- lsn = "%X/%08X" % ((start_lsn >> 32) & 0xFFFFFFFF,
- start_lsn & 0xFFFFFFFF)
+ lsn = f"{start_lsn >> 32 & 4294967295:X}/{start_lsn & 4294967295:08X}"
command += lsn
@@ -616,7 +599,7 @@ def start_replication(
raise psycopg2.ProgrammingError(
"cannot specify timeline for logical replication")
- command += " TIMELINE %d" % timeline
+ command += f" TIMELINE {timeline}"
if options:
if slot_type == REPLICATION_PHYSICAL:
@@ -627,7 +610,7 @@ def start_replication(
for k, v in options.items():
if not command.endswith('('):
command += ", "
- command += "%s %s" % (quote_ident(k, self), _A(str(v)))
+ command += f"{quote_ident(k, self)} {_A(str(v))}"
command += ")"
self.start_replication_expert(
@@ -640,7 +623,7 @@ def fileno(self):
# a dbtype and adapter for Python UUID type
-class UUID_adapter(object):
+class UUID_adapter:
"""Adapt Python's uuid.UUID__ type to PostgreSQL's uuid__.
.. __: https://docs.python.org/library/uuid.html
@@ -655,10 +638,10 @@ def __conform__(self, proto):
return self
def getquoted(self):
- return ("'%s'::uuid" % self._uuid).encode('utf8')
+ return (f"'{self._uuid}'::uuid").encode('utf8')
def __str__(self):
- return "'%s'::uuid" % self._uuid
+ return f"'{self._uuid}'::uuid"
def register_uuid(oids=None, conn_or_curs=None):
@@ -695,7 +678,7 @@ def register_uuid(oids=None, conn_or_curs=None):
# a type, dbtype and adapter for PostgreSQL inet type
-class Inet(object):
+class Inet:
"""Wrap a string to allow for correct SQL-quoting of inet values.
Note that this adapter does NOT check the passed value to make
@@ -707,7 +690,7 @@ def __init__(self, addr):
self.addr = addr
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self.addr)
+ return f"{self.__class__.__name__}({self.addr!r})"
def prepare(self, conn):
self._conn = conn
@@ -780,7 +763,7 @@ def wait_select(conn):
elif state == POLL_WRITE:
select.select([], [conn.fileno()], [])
else:
- raise conn.OperationalError("bad state from poll: %s" % state)
+ raise conn.OperationalError(f"bad state from poll: {state}")
except KeyboardInterrupt:
conn.cancel()
# the loop will be broken by a server error
@@ -802,7 +785,7 @@ def _solve_conn_curs(conn_or_curs):
return conn, curs
-class HstoreAdapter(object):
+class HstoreAdapter:
"""Adapt a Python dict to the hstore syntax."""
def __init__(self, wrapped):
self.wrapped = wrapped
@@ -882,7 +865,7 @@ def parse(self, s, cur, _bsdec=_re.compile(r"\\(.)")):
for m in self._re_hstore.finditer(s):
if m is None or m.start() != start:
raise psycopg2.InterfaceError(
- "error parsing hstore pair at char %d" % start)
+ f"error parsing hstore pair at char {start}")
k = _bsdec.sub(r'\1', m.group(1))
v = m.group(2)
if v is not None:
@@ -893,7 +876,7 @@ def parse(self, s, cur, _bsdec=_re.compile(r"\\(.)")):
if start < len(s):
raise psycopg2.InterfaceError(
- "error parsing hstore: unparsed data after char %d" % start)
+ f"error parsing hstore: unparsed data after char {start}")
return rv
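
# --- Editor's aside: a deliberately simplified sketch of the hstore text
# format parsed above (the real _re_hstore also handles backslash escapes
# and case-insensitive NULL).
import re

_toy_hstore = re.compile(
    r'"((?:[^"\\]|\\.)*)"\s*=>\s*(?:"((?:[^"\\]|\\.)*)"|(NULL))')

def _toy_parse(s):
    rv = {}
    for m in _toy_hstore.finditer(s):
        rv[m.group(1)] = None if m.group(3) else m.group(2)
    return rv

print(_toy_parse('"a"=>"1", "b"=>NULL'))  # {'a': '1', 'b': None}
# ---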
@@ -921,12 +904,11 @@ def get_oids(self, conn_or_curs):
rv0, rv1 = [], []
# get the oid for the hstore
- curs.execute("""\
-SELECT t.oid, %s
+ curs.execute(f"""SELECT t.oid, {typarray}
FROM pg_type t JOIN pg_namespace ns
ON typnamespace = ns.oid
WHERE typname = 'hstore';
-""" % typarray)
+""")
for oids in curs:
rv0.append(oids[0])
rv1.append(oids[1])
@@ -990,12 +972,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
array_oid = tuple([x for x in array_oid if x])
# create and register the typecaster
- if PY2 and unicode:
- cast = HstoreAdapter.parse_unicode
- else:
- cast = HstoreAdapter.parse
-
- HSTORE = _ext.new_type(oid, "HSTORE", cast)
+ HSTORE = _ext.new_type(oid, "HSTORE", HstoreAdapter.parse)
_ext.register_type(HSTORE, not globally and conn_or_curs or None)
_ext.register_adapter(dict, HstoreAdapter)
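
# --- Editor's aside (sketch; the DSN is hypothetical and the server needs the
# hstore extension): with the Python-3-only code path above, register_hstore
# maps hstore values to plain dicts of str.
import psycopg2
from psycopg2.extras import register_hstore

conn = psycopg2.connect("dbname=test")
register_hstore(conn)
with conn.cursor() as cur:
    cur.execute("SELECT 'a=>1, b=>NULL'::hstore")
    print(cur.fetchone()[0])  # {'a': '1', 'b': None}
# ---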
@@ -1004,7 +981,7 @@ def register_hstore(conn_or_curs, globally=False, unicode=False,
_ext.register_type(HSTOREARRAY, not globally and conn_or_curs or None)
-class CompositeCaster(object):
+class CompositeCaster:
"""Helps conversion of a PostgreSQL composite type into a Python object.
The class is usually created by the `register_composite()` function.
@@ -1025,7 +1002,7 @@ def __init__(self, name, oid, attrs, array_oid=None, schema=None):
self.typecaster = _ext.new_type((oid,), name, self.parse)
if array_oid:
self.array_typecaster = _ext.new_array_type(
- (array_oid,), "%sARRAY" % name, self.typecaster)
+ (array_oid,), f"{name}ARRAY", self.typecaster)
else:
self.array_typecaster = None
@@ -1057,7 +1034,7 @@ def make(self, values):
return self._ctor(values)
_re_tokenize = _re.compile(r"""
-\(? ([,)]) # an empty token, representing NULL
+ \(? ([,)]) # an empty token, representing NULL
| \(? " ((?: [^"] | "")*) " [,)] # or a quoted string
| \(? ([^",)]+) [,)] # or an unquoted string
""", _re.VERBOSE)
@@ -1069,7 +1046,7 @@ def tokenize(self, s):
rv = []
for m in self._re_tokenize.finditer(s):
if m is None:
- raise psycopg2.InterfaceError("can't parse type: %r" % s)
+ raise psycopg2.InterfaceError(f"can't parse type: {s!r}")
if m.group(1) is not None:
rv.append(None)
elif m.group(2) is not None:
@@ -1080,6 +1057,7 @@ def tokenize(self, s):
return rv
def _create_type(self, name, attnames):
+ name = _re_clean.sub('_', name)
self.type = namedtuple(name, attnames)
self._ctor = self.type._make
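
# --- Editor's aside: effect of the _re_clean substitution added above —
# punctuation in a type name is replaced with '_' so namedtuple accepts it.
import re
from collections import namedtuple

_re_clean_sketch = re.compile(
    '[' + re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')

T = namedtuple(_re_clean_sketch.sub('_', 'my.type'), ['a', 'b'])
print(T(1, 2))  # my_type(a=1, b=2)
# ---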
@@ -1117,14 +1095,46 @@ def _from_db(self, name, conn_or_curs):
recs = curs.fetchall()
+ if not recs:
+ # The above algorithm doesn't work for a customized search_path
+ # (#1487). The implementation below works better but, to guarantee
+ # backwards compatibility, use it only if the original one failed.
+ try:
+ savepoint = False
+ # Because we executed statements earlier, we are either INTRANS
+ # or we are IDLE only if the transaction is autocommit, in
+ # which case we don't need the savepoint anyway.
+ if conn.status == _ext.STATUS_IN_TRANSACTION:
+ curs.execute("SAVEPOINT register_type")
+ savepoint = True
+
+ curs.execute("""\
+SELECT t.oid, %s, attname, atttypid, typname, nspname
+FROM pg_type t
+JOIN pg_namespace ns ON typnamespace = ns.oid
+JOIN pg_attribute a ON attrelid = typrelid
+WHERE t.oid = %%s::regtype
+ AND attnum > 0 AND NOT attisdropped
+ORDER BY attnum;
+""" % typarray, (name, ))
+ except psycopg2.ProgrammingError:
+ pass
+ else:
+ recs = curs.fetchall()
+ if recs:
+ tname = recs[0][4]
+ schema = recs[0][5]
+ finally:
+ if savepoint:
+ curs.execute("ROLLBACK TO SAVEPOINT register_type")
+
# revert the status of the connection as before the command
- if (conn_status != _ext.STATUS_IN_TRANSACTION
- and not conn.autocommit):
+ if conn_status != _ext.STATUS_IN_TRANSACTION and not conn.autocommit:
conn.rollback()
if not recs:
raise psycopg2.ProgrammingError(
- "PostgreSQL type '%s' not found" % name)
+ f"PostgreSQL type '{name}' not found")
type_oid = recs[0][0]
array_oid = recs[0][1]
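
# --- Editor's aside (sketch; connection and type are hypothetical): the
# lookup above backs register_composite, typically used like this.
import psycopg2
from psycopg2.extras import register_composite

conn = psycopg2.connect("dbname=test")
with conn.cursor() as cur:
    cur.execute("CREATE TYPE card AS (value int, suit text)")
    register_composite('card', cur)
    cur.execute("SELECT (8, 'hearts')::card")
    print(cur.fetchone()[0])  # card(value=8, suit='hearts')
# ---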
@@ -1223,11 +1233,11 @@ def execute_values(cur, sql, argslist, template=None, page_size=100, fetch=False
compose the query.
- If the *argslist* items are sequences it should contain positional
- placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)``" if there
- are constants value...).
+ placeholders (e.g. ``"(%s, %s, %s)"``, or ``"(%s, %s, 42)"`` if there
+ are constant values...).
- If the *argslist* items are mappings it should contain named
- placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
+ placeholders (e.g. ``"(%(id)s, %(f1)s, 42)"``).
If not specified, assume the arguments are sequence and use a simple
positional template (i.e. ``(%s, %s, ...)``), with the number of
@@ -1323,3 +1333,8 @@ def _split_sql(sql):
raise ValueError("the query doesn't contain any '%s' placeholder")
return pre, post
+
+
+# ascii except alnum and underscore
+_re_clean = _re.compile(
+ '[' + _re.escape(' !"#$%&\'()*+,-./:;<=>?@[\\]^`{|}~') + ']')
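
# --- Editor's aside (sketch; table and data are hypothetical): typical use of
# execute_values, whose docstring is reindented above.
import psycopg2
from psycopg2.extras import execute_values

conn = psycopg2.connect("dbname=test")
with conn, conn.cursor() as cur:
    execute_values(
        cur, "INSERT INTO test (id, v1, v2) VALUES %s",
        [(1, 2, 3), (4, 5, 6), (7, 8, 9)])
# ---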
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/pool.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/pool.py
index 30a29c33..9d67d68e 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/pool.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/pool.py
@@ -5,7 +5,7 @@
# psycopg/pool.py - pooling code for psycopg
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -33,7 +33,7 @@ class PoolError(psycopg2.Error):
pass
-class AbstractConnectionPool(object):
+class AbstractConnectionPool:
"""Generic key-based pooling code."""
def __init__(self, minconn, maxconn, *args, **kwargs):
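
# --- Editor's aside (sketch; the DSN is hypothetical): minimal use of the
# pooling API whose base class loses its explicit `object` parent above.
from psycopg2.pool import SimpleConnectionPool

pool = SimpleConnectionPool(1, 5, dsn="dbname=test")  # min 1, max 5 connections
conn = pool.getconn()
try:
    with conn.cursor() as cur:
        cur.execute("SELECT 1")
finally:
    pool.putconn(conn)
pool.closeall()
# ---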
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/sql.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/sql.py
index 68834522..69b352b7 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/sql.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/sql.py
@@ -4,7 +4,7 @@
# psycopg/sql.py - SQL composition utility module
#
# Copyright (C) 2016-2019 Daniele Varrazzo
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -27,13 +27,12 @@
import string
from psycopg2 import extensions as ext
-from psycopg2.compat import PY3, string_types
_formatter = string.Formatter()
-class Composable(object):
+class Composable:
"""
Abstract base class for objects that can be used to compose an SQL string.
@@ -51,7 +50,7 @@ def __init__(self, wrapped):
self._wrapped = wrapped
def __repr__(self):
- return "%s(%r)" % (self.__class__.__name__, self._wrapped)
+ return f"{self.__class__.__name__}({self._wrapped!r})"
def as_string(self, context):
"""
@@ -107,10 +106,10 @@ def __init__(self, seq):
for i in seq:
if not isinstance(i, Composable):
raise TypeError(
- "Composed elements must be Composable, got %r instead" % i)
+ f"Composed elements must be Composable, got {i!r} instead")
wrapped.append(i)
- super(Composed, self).__init__(wrapped)
+ super().__init__(wrapped)
@property
def seq(self):
@@ -148,7 +147,7 @@ def join(self, joiner):
"foo", "bar"
"""
- if isinstance(joiner, string_types):
+ if isinstance(joiner, str):
joiner = SQL(joiner)
elif not isinstance(joiner, SQL):
raise TypeError(
@@ -180,9 +179,9 @@ class SQL(Composable):
select "foo", "bar" from "table"
"""
def __init__(self, string):
- if not isinstance(string, string_types):
+ if not isinstance(string, str):
raise TypeError("SQL values must be strings")
- super(SQL, self).__init__(string)
+ super().__init__(string)
@property
def string(self):
@@ -324,10 +323,10 @@ def __init__(self, *strings):
raise TypeError("Identifier cannot be empty")
for s in strings:
- if not isinstance(s, string_types):
+ if not isinstance(s, str):
raise TypeError("SQL identifier parts must be strings")
- super(Identifier, self).__init__(strings)
+ super().__init__(strings)
@property
def strings(self):
@@ -345,9 +344,7 @@ def string(self):
"the Identifier wraps more than one than one string")
def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- ', '.join(map(repr, self._wrapped)))
+ return f"{self.__class__.__name__}({', '.join(map(repr, self._wrapped))})"
def as_string(self, context):
return '.'.join(ext.quote_ident(s, context) for s in self._wrapped)
@@ -392,7 +389,7 @@ def as_string(self, context):
a.prepare(conn)
rv = a.getquoted()
- if PY3 and isinstance(rv, bytes):
+ if isinstance(rv, bytes):
rv = rv.decode(ext.encodings[conn.encoding])
return rv
@@ -426,14 +423,14 @@ class Placeholder(Composable):
"""
def __init__(self, name=None):
- if isinstance(name, string_types):
+ if isinstance(name, str):
if ')' in name:
- raise ValueError("invalid name: %r" % name)
+ raise ValueError(f"invalid name: {name!r}")
elif name is not None:
- raise TypeError("expected string or None as name, got %r" % name)
+ raise TypeError(f"expected string or None as name, got {name!r}")
- super(Placeholder, self).__init__(name)
+ super().__init__(name)
@property
def name(self):
@@ -441,12 +438,14 @@ def name(self):
return self._wrapped
def __repr__(self):
- return "Placeholder(%r)" % (
- self._wrapped if self._wrapped is not None else '',)
+ if self._wrapped is None:
+ return f"{self.__class__.__name__}()"
+ else:
+ return f"{self.__class__.__name__}({self._wrapped!r})"
def as_string(self, context):
if self._wrapped is not None:
- return "%%(%s)s" % self._wrapped
+ return f"%({self._wrapped})s"
else:
return "%s"
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/tz.py b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/tz.py
index 555fe762..d88ca37c 100644
--- a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/tz.py
+++ b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2/tz.py
@@ -7,7 +7,7 @@
# psycopg/tz.py - tzinfo implementation
#
# Copyright (C) 2003-2019 Federico Di Gregorio
-# Copyright (C) 2020 The Psycopg Team
+# Copyright (C) 2020-2021 The Psycopg Team
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
@@ -45,9 +45,13 @@ class FixedOffsetTimezone(datetime.tzinfo):
offset and name that instance will be returned. This saves memory and
improves comparability.
+ .. versionchanged:: 2.9
+
+ The constructor can take either a timedelta or a number of minutes of
+ offset. Previously only minutes were supported.
+
.. __: https://docs.python.org/library/datetime.html
"""
-
_name = None
_offset = ZERO
@@ -55,27 +59,41 @@ class FixedOffsetTimezone(datetime.tzinfo):
def __init__(self, offset=None, name=None):
if offset is not None:
- self._offset = datetime.timedelta(minutes=offset)
+ if not isinstance(offset, datetime.timedelta):
+ offset = datetime.timedelta(minutes=offset)
+ self._offset = offset
if name is not None:
self._name = name
def __new__(cls, offset=None, name=None):
- """Return a suitable instance created earlier if it exists"""
+ """Return a suitable instance created earlier if it exists
+ """
key = (offset, name)
try:
return cls._cache[key]
except KeyError:
- tz = super(FixedOffsetTimezone, cls).__new__(cls, offset, name)
+ tz = super().__new__(cls, offset, name)
cls._cache[key] = tz
return tz
def __repr__(self):
- offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
- return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" % (offset_mins, self._name)
+ return "psycopg2.tz.FixedOffsetTimezone(offset=%r, name=%r)" \
+ % (self._offset, self._name)
+
+ def __eq__(self, other):
+ if isinstance(other, FixedOffsetTimezone):
+ return self._offset == other._offset
+ else:
+ return NotImplemented
+
+ def __ne__(self, other):
+ if isinstance(other, FixedOffsetTimezone):
+ return self._offset != other._offset
+ else:
+ return NotImplemented
def __getinitargs__(self):
- offset_mins = self._offset.seconds // 60 + self._offset.days * 24 * 60
- return offset_mins, self._name
+ return self._offset, self._name
def utcoffset(self, dt):
return self._offset
@@ -83,14 +101,16 @@ def utcoffset(self, dt):
def tzname(self, dt):
if self._name is not None:
return self._name
- else:
- seconds = self._offset.seconds + self._offset.days * 86400
- hours, seconds = divmod(seconds, 3600)
- minutes = seconds / 60
- if minutes:
- return "%+03d:%d" % (hours, minutes)
- else:
- return "%+03d" % hours
+
+ minutes, seconds = divmod(self._offset.total_seconds(), 60)
+ hours, minutes = divmod(minutes, 60)
+ rv = "%+03d" % hours
+ if minutes or seconds:
+ rv += ":%02d" % minutes
+ if seconds:
+ rv += ":%02d" % seconds
+
+ return rv
def dst(self, dt):
return ZERO
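
# --- Editor's aside (illustrative): per the 2.9 change above, a number of
# minutes and an equivalent timedelta now construct instances that compare
# equal, and tzname() renders sub-hour offsets.
import datetime
from psycopg2.tz import FixedOffsetTimezone

tz_a = FixedOffsetTimezone(offset=330, name="IST")
tz_b = FixedOffsetTimezone(offset=datetime.timedelta(minutes=330))
assert tz_a == tz_b          # __eq__ compares offsets only
print(tz_b.tzname(None))     # +05:30
# ---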
@@ -109,7 +129,6 @@ class LocalTimezone(datetime.tzinfo):
This is the exact implementation from the Python 2.3 documentation.
"""
-
def utcoffset(self, dt):
if self._isdst(dt):
return DSTOFFSET
@@ -126,7 +145,9 @@ def tzname(self, dt):
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
- tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1)
+ tt = (dt.year, dt.month, dt.day,
+ dt.hour, dt.minute, dt.second,
+ dt.weekday(), 0, -1)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1
new file mode 100644
index 00000000..2e2b9408
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libcom_err-2abe824b.so.2.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libcrypto-2ade47cd.so.1.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libcrypto-2ade47cd.so.1.1
new file mode 100644
index 00000000..993c9b98
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libcrypto-2ade47cd.so.1.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2
new file mode 100644
index 00000000..262b9667
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libgssapi_krb5-497db0c6.so.2.2 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1
new file mode 100644
index 00000000..1a905987
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libk5crypto-b1f99d5c.so.3.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5
new file mode 100644
index 00000000..4026ca82
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkeyutils-dfe70bd6.so.1.5 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkrb5-6824148d.so.3.3 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkrb5-6824148d.so.3.3
new file mode 100644
index 00000000..e623ab01
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkrb5-6824148d.so.3.3 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkrb5support-f4e34ad2.so.0.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkrb5support-f4e34ad2.so.0.1
new file mode 100644
index 00000000..4dd88b41
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libkrb5support-f4e34ad2.so.0.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/liblber-2-f65b1f9f.4.so.2.11.7 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/liblber-2-f65b1f9f.4.so.2.11.7
new file mode 100644
index 00000000..25048a19
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/liblber-2-f65b1f9f.4.so.2.11.7 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libldap_r-2-f56d324d.4.so.2.11.7 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libldap_r-2-f56d324d.4.so.2.11.7
new file mode 100644
index 00000000..8a98ed06
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libldap_r-2-f56d324d.4.so.2.11.7 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0
new file mode 100644
index 00000000..9c8c5f52
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libpcre-9513aab5.so.1.2.0 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libpq-e85f78f2.so.5.15 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libpq-e85f78f2.so.5.15
new file mode 100644
index 00000000..f0a54073
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libpq-e85f78f2.so.5.15 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libsasl2-0f265e47.so.3.0.0 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libsasl2-0f265e47.so.3.0.0
new file mode 100644
index 00000000..da3f4bb3
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libsasl2-0f265e47.so.3.0.0 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libselinux-0922c95c.so.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libselinux-0922c95c.so.1
new file mode 100644
index 00000000..171b8366
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libselinux-0922c95c.so.1 differ
diff --git a/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libssl-3a880ada.so.1.1 b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libssl-3a880ada.so.1.1
new file mode 100644
index 00000000..c898206e
Binary files /dev/null and b/infrastructure/stacks/postcode_etl/functions/uec-sf-postcode-insert/psycopg2_binary.libs/libssl-3a880ada.so.1.1 differ
diff --git a/infrastructure/stacks/postcode_etl/locals.tf b/infrastructure/stacks/postcode_etl/locals.tf
index b136d345..382e2d39 100644
--- a/infrastructure/stacks/postcode_etl/locals.tf
+++ b/infrastructure/stacks/postcode_etl/locals.tf
@@ -3,32 +3,32 @@ locals {
postcode_service_account_policy_name = "${var.service_prefix}-policy"
postcode_insert_function_name = "${var.service_prefix}-postcode-insert"
postcode_insert_description = "Service Finder function to insert postcode and postcode mappings into DynamoDB"
- postcode_insert_runtime = "python3.8"
+ postcode_insert_runtime = "python3.9"
postcode_insert_timeout = 900
postcode_insert_memory_size = 2048
postcode_extract_function_name = "${var.service_prefix}-postcode-extract"
postcode_extract_description = "Service Finder function to extract postcode and postcode mapping from DoS database into csv files"
- postcode_extract_runtime = "python3.8"
+ postcode_extract_runtime = "python3.9"
postcode_extract_timeout = 900
postcode_extract_memory_size = 2048
# postcode_extract_core_dos_python_libs_arn = data.aws_lambda_layer_version.dos_python_libs.arn
region_update_function_name = "${var.service_prefix}-region-update"
region_update_description = "Service finder function to update postcode mappings with region and subregions"
- region_update_runtime = "python3.8"
+ region_update_runtime = "python3.9"
region_update_timeout = 900
region_update_memory_size = 2048
email_update_function_name = "${var.service_prefix}-email-update"
email_update_description = "Service finder function to update postcode mappings with email and ICBs"
- email_update_runtime = "python3.8"
+ email_update_runtime = "python3.9"
email_update_timeout = 900
email_update_memory_size = 2048
file_generator_function_name = "${var.service_prefix}-ccg-file-generator"
file_generator_description = "Service finder function to generate ccg csv from pcodey files"
- file_generator_runtime = "python3.8"
+ file_generator_runtime = "python3.9"
file_generator_timeout = 900
file_generator_memory_size = 2048
diff --git a/infrastructure/stacks/postcode_etl/postcode_insert.tf b/infrastructure/stacks/postcode_etl/postcode_insert.tf
index 6042d5bd..0b6752e4 100644
--- a/infrastructure/stacks/postcode_etl/postcode_insert.tf
+++ b/infrastructure/stacks/postcode_etl/postcode_insert.tf
@@ -2,7 +2,7 @@
resource "aws_lambda_function" "postcode_insert_lambda" {
filename = data.archive_file.postcode_insert_function.output_path
function_name = local.postcode_insert_function_name
- layers = ["arn:aws:lambda:eu-west-2:336392948345:layer:AWSSDKPandas-Python38:13"]
+ layers = ["arn:aws:lambda:eu-west-2:336392948345:layer:AWSSDKPandas-Python39:28"]
description = local.postcode_insert_description
role = aws_iam_role.postcode_insert_lambda_role.arn
handler = "postcode_insert.lambda_handler"
diff --git a/infrastructure/stacks/postcode_etl_sns/postcode_etl_sns.tf b/infrastructure/stacks/postcode_etl_sns/postcode_etl_sns.tf
index 5e032d75..178576fa 100644
--- a/infrastructure/stacks/postcode_etl_sns/postcode_etl_sns.tf
+++ b/infrastructure/stacks/postcode_etl_sns/postcode_etl_sns.tf
@@ -65,7 +65,7 @@ resource "aws_lambda_function" "postcode_etl_sns_lambda" {
role = aws_iam_role.postcode_etl_sns_role.arn
handler = "postcode_etl_sns.lambda_handler"
source_code_hash = data.archive_file.postcode_etl_sns_function.output_base64sha256
- runtime = "python3.8"
+ runtime = "python3.9"
publish = false
tags = local.standard_tags
environment {