io.prestosql.spi.PrestoException Source Code Examples

The following are example usages of io.prestosql.spi.PrestoException drawn from open-source projects; follow each project on GitHub to view the full source files.
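All of the examples below share one pattern: detect an invalid state or catch a lower-level failure, then throw a PrestoException carrying an error code from io.prestosql.spi.StandardErrorCode (or a connector-specific ErrorCodeSupplier), optionally with a message and the original cause. A minimal self-contained sketch of that pattern, before the real examples (the readTextFile method and its message are illustrative, not taken from any project below):

import io.prestosql.spi.PrestoException;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;

public class PrestoExceptionSketch
{
    // Wrap a checked IOException in a PrestoException that carries a
    // standard error code, a human-readable message, and the cause.
    public static String readTextFile(Path path)
    {
        try {
            return Files.readString(path);
        }
        catch (IOException e) {
            throw new PrestoException(GENERIC_INTERNAL_ERROR, "Failed to read file: " + path, e);
        }
    }
}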

Example 1  Project: presto  File: ElasticsearchClient.java
public SearchResponse nextPage(String scrollId)
{
    LOG.debug("Next page: %s", scrollId);

    SearchScrollRequest request = new SearchScrollRequest(scrollId)
            .scroll(new TimeValue(scrollTimeout.toMillis()));

    long start = System.nanoTime();
    try {
        return client.searchScroll(request);
    }
    catch (IOException e) {
        throw new PrestoException(ELASTICSEARCH_CONNECTION_ERROR, e);
    }
    finally {
        nextPageStats.add(Duration.nanosSince(start));
    }
}
 
Example 2  Project: presto  File: RecordFileWriter.java
public void appendRow(Page dataPage, int position)
{
    for (int field = 0; field < fieldCount; field++) {
        Block block = dataPage.getBlock(field);
        if (block.isNull(position)) {
            tableInspector.setStructFieldData(row, structFields.get(field), null);
        }
        else {
            setters[field].setField(block, position);
        }
    }

    try {
        recordWriter.write(serializer.serialize(row, tableInspector));
    }
    catch (SerDeException | IOException e) {
        throw new PrestoException(HIVE_WRITER_DATA_ERROR, e);
    }
}
 
Example 3  Project: presto  File: EncodedPolylineFunctions.java
@Description("Encodes a linestring or multipoint geometry to a polyline")
@ScalarFunction("to_encoded_polyline")
@SqlType(StandardTypes.VARCHAR)
public static Slice toEncodedPolyline(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
{
    OGCGeometry geometry = deserialize(input);
    validateType("encode_polyline", geometry, EnumSet.of(LINE_STRING, MULTI_POINT));
    GeometryType geometryType = GeometryType.getForEsriGeometryType(geometry.geometryType());
    switch (geometryType) {
        case LINE_STRING:
        case MULTI_POINT:
            return encodePolyline((MultiVertexGeometry) geometry.getEsriGeometry());
        default:
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Unexpected geometry type: " + geometryType);
    }
}
 
Example 4  Project: presto  File: ExpressionAnalyzer.java
@Override
protected Type visitFormat(Format node, StackableAstVisitorContext<Context> context)
{
    List<Type> arguments = node.getArguments().stream()
            .map(expression -> process(expression, context))
            .collect(toImmutableList());

    if (!isVarcharType(arguments.get(0))) {
        throw semanticException(TYPE_MISMATCH, node.getArguments().get(0), "Type of first argument to format() must be VARCHAR (actual: %s)", arguments.get(0));
    }

    for (int i = 1; i < arguments.size(); i++) {
        try {
            FormatFunction.validateType(metadata, arguments.get(i));
        }
        catch (PrestoException e) {
            if (e.getErrorCode().equals(NOT_SUPPORTED.toErrorCode())) {
                throw semanticException(NOT_SUPPORTED, node.getArguments().get(i), "%s", e.getRawMessage());
            }
            throw e;
        }
    }

    return setExpressionType(node, VARCHAR);
}
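Example 4 above also shows the consuming side of the API: a caught PrestoException exposes its error code through getErrorCode(), which can be compared against a StandardErrorCode via toErrorCode(). A minimal standalone sketch of that dispatch pattern (the handle method is illustrative, not from the Presto sources):

import io.prestosql.spi.PrestoException;

import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED;

public class ErrorCodeDispatchSketch
{
    // Translate NOT_SUPPORTED into a caller-friendly exception and
    // re-throw everything else, mirroring the catch block in Example 4.
    public static void handle(Runnable action)
    {
        try {
            action.run();
        }
        catch (PrestoException e) {
            if (e.getErrorCode().equals(NOT_SUPPORTED.toErrorCode())) {
                throw new UnsupportedOperationException(e.getRawMessage(), e);
            }
            throw e;
        }
    }
}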
 
Example 5  Project: presto  File: BlackHoleMetadata.java
@Override
public Optional<ConnectorNewTableLayout> getNewTableLayout(ConnectorSession connectorSession, ConnectorTableMetadata tableMetadata)
{
    @SuppressWarnings("unchecked")
    List<String> distributeColumns = (List<String>) tableMetadata.getProperties().get(DISTRIBUTED_ON);
    if (distributeColumns.isEmpty()) {
        return Optional.empty();
    }

    Set<String> undefinedColumns = Sets.difference(
            ImmutableSet.copyOf(distributeColumns),
            tableMetadata.getColumns().stream()
                    .map(ColumnMetadata::getName)
                    .collect(toSet()));
    if (!undefinedColumns.isEmpty()) {
        throw new PrestoException(INVALID_TABLE_PROPERTY, "Distribute columns not defined on table: " + undefinedColumns);
    }

    return Optional.of(new ConnectorNewTableLayout(BlackHolePartitioningHandle.INSTANCE, distributeColumns));
}
 
Example 6  Project: presto  File: ThriftHiveMetastore.java
@Override
public void updatePartitionStatistics(HiveIdentity identity, Table table, String partitionName, Function<PartitionStatistics, PartitionStatistics> update)
{
    List<Partition> partitions = getPartitionsByNames(identity, table.getDbName(), table.getTableName(), ImmutableList.of(partitionName));
    if (partitions.size() != 1) {
        throw new PrestoException(HIVE_METASTORE_ERROR, "Metastore returned multiple partitions for name: " + partitionName);
    }
    Partition originalPartition = getOnlyElement(partitions);

    PartitionStatistics currentStatistics = requireNonNull(
            getPartitionStatistics(identity, table, partitions).get(partitionName), "getPartitionStatistics() did not return statistics for partition");
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);

    Partition modifiedPartition = originalPartition.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedPartition.setParameters(updateStatisticsParameters(modifiedPartition.getParameters(), basicStatistics));
    alterPartitionWithoutStatistics(identity, table.getDbName(), table.getTableName(), modifiedPartition);

    Map<String, HiveType> columns = modifiedPartition.getSd().getCols().stream()
            .collect(toImmutableMap(FieldSchema::getName, schema -> HiveType.valueOf(schema.getType())));
    setPartitionColumnStatistics(identity, table.getDbName(), table.getTableName(), partitionName, columns, updatedStatistics.getColumnStatistics(), basicStatistics.getRowCount());

    Set<String> removedStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedStatistics.forEach(column -> deletePartitionColumnStatistics(identity, table.getDbName(), table.getTableName(), partitionName, column));
}
 
Example 7  Project: presto  File: IpAddressDecoder.java
@Override
public void decode(SearchHit hit, Supplier<Object> getter, BlockBuilder output)
{
    Object value = getter.get();
    if (value == null) {
        output.appendNull();
    }
    else if (value instanceof String) {
        String address = (String) value;
        Slice slice = castToIpAddress(Slices.utf8Slice(address));
        ipAddressType.writeSlice(output, slice);
    }
    else {
        throw new PrestoException(TYPE_MISMATCH, format("Expected a string value for field '%s' of type IP: %s [%s]", path, value, value.getClass().getSimpleName()));
    }
}
 
Example 8  Project: presto  File: JsonOperators.java
@ScalarOperator(CAST)
@LiteralParameters("x")
@SqlType(JSON)
public static Slice castFromVarchar(@SqlType("varchar(x)") Slice value)
{
    try {
        SliceOutput output = new DynamicSliceOutput(value.length() + 2);
        try (JsonGenerator jsonGenerator = createJsonGenerator(JSON_FACTORY, output)) {
            jsonGenerator.writeString(value.toStringUtf8());
        }
        return output.slice();
    }
    catch (IOException e) {
        throw new PrestoException(INVALID_CAST_ARGUMENT, format("Cannot cast '%s' to %s", value.toStringUtf8(), JSON));
    }
}
 
Example 9  Project: presto  File: RowToRowCast.java
@Override
public ScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, Metadata metadata)
{
    checkArgument(arity == 1, "Expected arity to be 1");
    Type fromType = boundVariables.getTypeVariable("F");
    Type toType = boundVariables.getTypeVariable("T");
    if (fromType.getTypeParameters().size() != toType.getTypeParameters().size()) {
        throw new PrestoException(StandardErrorCode.INVALID_FUNCTION_ARGUMENT, "the size of fromType and toType must match");
    }
    Class<?> castOperatorClass = generateRowCast(fromType, toType, metadata);
    MethodHandle methodHandle = methodHandle(castOperatorClass, "castRow", ConnectorSession.class, Block.class);
    return new ScalarFunctionImplementation(
            false,
            ImmutableList.of(valueTypeArgumentProperty(RETURN_NULL_ON_NULL)),
            methodHandle);
}
 
Example 10  Project: presto  File: OrcFileWriterFactory.java
private static CompressionKind getCompression(Properties schema, JobConf configuration)
{
    String compressionName = OrcConf.COMPRESS.getString(schema, configuration);
    if (compressionName == null) {
        return CompressionKind.ZLIB;
    }

    CompressionKind compression;
    try {
        compression = CompressionKind.valueOf(compressionName.toUpperCase(ENGLISH));
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Unknown ORC compression type " + compressionName);
    }
    return compression;
}
 
Example 11  Project: presto  File: IcebergMetadata.java
@Override
public void createSchema(ConnectorSession session, String schemaName, Map<String, Object> properties, PrestoPrincipal owner)
{
    Optional<String> location = getSchemaLocation(properties).map(uri -> {
        try {
            hdfsEnvironment.getFileSystem(new HdfsContext(session, schemaName), new Path(uri));
        }
        catch (IOException | IllegalArgumentException e) {
            throw new PrestoException(INVALID_SCHEMA_PROPERTY, "Invalid location URI: " + uri, e);
        }
        return uri;
    });

    Database database = Database.builder()
            .setDatabaseName(schemaName)
            .setLocation(location)
            .setOwnerType(owner.getType())
            .setOwnerName(owner.getName())
            .build();

    metastore.createDatabase(new HiveIdentity(session), database);
}
 
Example 12  Project: presto  File: OrcStorageManager.java
private void writeShard(UUID shardUuid)
{
    if (backupStore.isPresent() && !backupStore.get().shardExists(shardUuid)) {
        throw new PrestoException(RAPTOR_ERROR, "Backup does not exist after write");
    }

    File stagingFile = storageService.getStagingFile(shardUuid);
    File storageFile = storageService.getStorageFile(shardUuid);

    storageService.createParents(storageFile);

    try {
        Files.move(stagingFile.toPath(), storageFile.toPath(), ATOMIC_MOVE);
    }
    catch (IOException e) {
        throw new PrestoException(RAPTOR_ERROR, "Failed to move shard file", e);
    }
}
 
Example 13  Project: presto  File: MongoPageSource.java
private void writeSlice(BlockBuilder output, Type type, Object value)
{
    if (type instanceof VarcharType) {
        type.writeSlice(output, utf8Slice(toVarcharValue(value)));
    }
    else if (type instanceof CharType) {
        type.writeSlice(output, truncateToLengthAndTrimSpaces(utf8Slice((String) value), ((CharType) type)));
    }
    else if (type.equals(OBJECT_ID)) {
        type.writeSlice(output, wrappedBuffer(((ObjectId) value).toByteArray()));
    }
    else if (type instanceof VarbinaryType) {
        if (value instanceof Binary) {
            type.writeSlice(output, wrappedBuffer(((Binary) value).getData()));
        }
        else {
            output.appendNull();
        }
    }
    else if (type instanceof DecimalType) {
        type.writeSlice(output, encodeScaledValue(((Decimal128) value).bigDecimalValue(), ((DecimalType) type).getScale()));
    }
    else {
        throw new PrestoException(GENERIC_INTERNAL_ERROR, "Unhandled type for Slice: " + type.getTypeSignature());
    }
}
 
Example 14  Project: presto  File: ElasticsearchClient.java
public List<String> getAliases()
{
    return doRequest("/_aliases", body -> {
        try {
            ImmutableList.Builder<String> result = ImmutableList.builder();
            JsonNode root = OBJECT_MAPPER.readTree(body);

            Iterator<JsonNode> elements = root.elements();
            while (elements.hasNext()) {
                JsonNode element = elements.next();
                JsonNode aliases = element.get("aliases");
                result.addAll(aliases.fieldNames());
            }
            return result.build();
        }
        catch (IOException e) {
            throw new PrestoException(ELASTICSEARCH_INVALID_RESPONSE, e);
        }
    });
}
 
Example 15  Project: presto  File: RubixInitializer.java
private void waitForCoordinator()
{
    try {
        coordinatorRetryDriver.run(
                "waitForCoordinator",
                () -> {
                    if (nodeManager.getAllNodes().stream().noneMatch(Node::isCoordinator)) {
                        // This exception will only be propagated when timeout is reached.
                        throw new PrestoException(GENERIC_INTERNAL_ERROR, "No coordinator node available");
                    }
                    return null;
                });
    }
    catch (Exception exception) {
        propagateIfPossible(exception, PrestoException.class);
        throw new RuntimeException(exception);
    }
}
 
Example 16  Project: presto  File: BackgroundHiveSplitLoader.java
public static Optional<BucketSplitInfo> createBucketSplitInfo(Optional<HiveBucketHandle> bucketHandle, Optional<HiveBucketFilter> bucketFilter)
{
    requireNonNull(bucketHandle, "bucketHandle is null");
    requireNonNull(bucketFilter, "buckets is null");

    if (bucketHandle.isEmpty()) {
        checkArgument(bucketFilter.isEmpty(), "bucketHandle must be present if bucketFilter is present");
        return Optional.empty();
    }

    int tableBucketCount = bucketHandle.get().getTableBucketCount();
    int readBucketCount = bucketHandle.get().getReadBucketCount();

    if (tableBucketCount != readBucketCount && bucketFilter.isPresent()) {
        // TODO: remove when supported
        throw new PrestoException(NOT_SUPPORTED, "Filter on \"$bucket\" is not supported when the table has partitions with different bucket counts");
    }

    List<HiveColumnHandle> bucketColumns = bucketHandle.get().getColumns();
    IntPredicate predicate = bucketFilter
            .<IntPredicate>map(filter -> filter.getBucketsToKeep()::contains)
            .orElse(bucket -> true);
    return Optional.of(new BucketSplitInfo(bucketColumns, tableBucketCount, readBucketCount, predicate));
}
 
Example 17  Project: presto  File: ParquetFileWriter.java
@Override
public void commit()
{
    try {
        parquetWriter.close();
    }
    catch (IOException | UncheckedIOException e) {
        try {
            rollbackAction.call();
        }
        catch (Exception ignored) {
            // ignore
        }
        throw new PrestoException(HIVE_WRITER_CLOSE_ERROR, "Error committing write parquet to Hive", e);
    }
}
 
Example 18  Project: presto  File: HiveUtil.java
public static InputFormat<?, ?> getInputFormat(Configuration configuration, Properties schema, boolean symlinkTarget)
{
    String inputFormatName = getInputFormatName(schema);
    try {
        JobConf jobConf = toJobConf(configuration);
        configureCompressionCodecs(jobConf);

        Class<? extends InputFormat<?, ?>> inputFormatClass = getInputFormatClass(jobConf, inputFormatName);
        if (symlinkTarget && inputFormatClass == SymlinkTextInputFormat.class) {
            // Symlink targets are assumed to be TEXTFILE unless serde indicates otherwise.
            inputFormatClass = TextInputFormat.class;
            if (isDeserializerClass(schema, AvroSerDe.class)) {
                inputFormatClass = AvroContainerInputFormat.class;
            }
        }

        return ReflectionUtils.newInstance(inputFormatClass, jobConf);
    }
    catch (ClassNotFoundException | RuntimeException e) {
        throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Unable to create input format " + inputFormatName, e);
    }
}
 
Example 19  Project: presto  File: TestAvroDecoder.java
@Test
public void testSchemaEvolutionToIncompatibleType()
{
    byte[] originalIntData = buildAvroData(getFieldBuilder()
                    .name("int_to_string_field").type().intType().noDefault()
                    .endRecord(),
            "int_to_string_field", 100);

    DecoderTestColumnHandle stringColumnReadingIntData = new DecoderTestColumnHandle(0, "row0", VARCHAR, "int_to_string_field", null, null, false, false, false);
    String changedTypeSchema = getFieldBuilder()
            .name("int_to_string_field").type().stringType().noDefault()
            .endRecord()
            .toString();

    assertThatThrownBy(() -> decodeRow(originalIntData, ImmutableSet.of(stringColumnReadingIntData), ImmutableMap.of(DATA_SCHEMA, changedTypeSchema)))
            .isInstanceOf(PrestoException.class)
            .hasCauseExactlyInstanceOf(AvroTypeException.class)
            .hasStackTraceContaining("Found int, expecting string")
            .hasMessageMatching("Decoding Avro record failed.");
}
 
Example 20  Project: presto  File: FunctionResolver.java
private Optional<ResolvedFunction> matchFunction(Collection<FunctionMetadata> candidates, List<TypeSignatureProvider> parameters, boolean coercionAllowed)
{
    List<ApplicableFunction> applicableFunctions = identifyApplicableFunctions(candidates, parameters, coercionAllowed);
    if (applicableFunctions.isEmpty()) {
        return Optional.empty();
    }

    if (coercionAllowed) {
        applicableFunctions = selectMostSpecificFunctions(applicableFunctions, parameters);
        checkState(!applicableFunctions.isEmpty(), "at least single function must be left");
    }

    if (applicableFunctions.size() == 1) {
        return Optional.of(getOnlyElement(applicableFunctions).getResolvedFunction());
    }

    StringBuilder errorMessageBuilder = new StringBuilder();
    errorMessageBuilder.append("Could not choose a best candidate operator. Explicit type casts must be added.\n");
    errorMessageBuilder.append("Candidates are:\n");
    for (ApplicableFunction function : applicableFunctions) {
        errorMessageBuilder.append("\t * ");
        errorMessageBuilder.append(function.getBoundSignature());
        errorMessageBuilder.append("\n");
    }
    throw new PrestoException(AMBIGUOUS_FUNCTION_CALL, errorMessageBuilder.toString());
}
 
Example 21  Project: pulsar  File: PulsarMetadata.java
@Override
public List<String> listSchemaNames(ConnectorSession session) {
    List<String> prestoSchemas = new LinkedList<>();
    try {
        List<String> tenants = pulsarAdmin.tenants().getTenants();
        for (String tenant : tenants) {
            prestoSchemas.addAll(pulsarAdmin.namespaces().getNamespaces(tenant).stream().map(namespace ->
                rewriteNamespaceDelimiterIfNeeded(namespace, pulsarConnectorConfig)).collect(Collectors.toList()));
        }
    } catch (PulsarAdminException e) {
        if (e.getStatusCode() == 401) {
            throw new PrestoException(QUERY_REJECTED, "Failed to get schemas from pulsar: Unauthorized");
        }
        throw new RuntimeException("Failed to get schemas from pulsar: "
                + ExceptionUtils.getRootCause(e).getLocalizedMessage(), e);
    }
    return prestoSchemas;
}
 
Example 22  Project: presto  File: SyncPartitionMetadataProcedure.java
private static List<FileStatus> listDirectory(FileSystem fileSystem, FileStatus current, List<Column> partitionColumns, int depth, boolean caseSensitive)
{
    if (depth == 0) {
        return ImmutableList.of(current);
    }

    try {
        return Stream.of(fileSystem.listStatus(current.getPath()))
                .filter(fileStatus -> isValidPartitionPath(fileStatus, partitionColumns.get(partitionColumns.size() - depth), caseSensitive))
                .flatMap(directory -> listDirectory(fileSystem, directory, partitionColumns, depth - 1, caseSensitive).stream())
                .collect(toImmutableList());
    }
    catch (IOException e) {
        throw new PrestoException(HIVE_FILESYSTEM_ERROR, e);
    }
}
 
Example 23  Project: presto  File: VarbinaryFunctions.java
@Description("Decode hex encoded binary data")
@ScalarFunction("from_hex")
@LiteralParameters("x")
@SqlType(StandardTypes.VARBINARY)
public static Slice fromHexVarchar(@SqlType("varchar(x)") Slice slice)
{
    if (slice.length() % 2 != 0) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "invalid input length " + slice.length());
    }

    byte[] result = new byte[slice.length() / 2];
    for (int i = 0; i < slice.length(); i += 2) {
        result[i / 2] = (byte) ((hexDigitCharToInt(slice.getByte(i)) << 4) | hexDigitCharToInt(slice.getByte(i + 1)));
    }
    return Slices.wrappedBuffer(result);
}
 
Example 24  Project: presto  File: SystemSessionProperties.java
private static Integer validateIntegerValue(Object value, String property, int lowerBoundIncluded, boolean allowNull)
{
    if (value == null && !allowNull) {
        throw new PrestoException(INVALID_SESSION_PROPERTY, format("%s must be non-null", property));
    }

    if (value == null) {
        return null;
    }

    int intValue = ((Number) value).intValue();
    if (intValue < lowerBoundIncluded) {
        throw new PrestoException(INVALID_SESSION_PROPERTY, format("%s must be equal or greater than %s", property, lowerBoundIncluded));
    }
    return intValue;
}
 
Example 25  Project: presto  File: PrometheusClient.java
static Optional<String> getBearerAuthInfoFromFile()
        throws URISyntaxException
{
    return new PrometheusConnectorConfig().getBearerTokenFile()
            .map(tokenFileName -> {
                try {
                    File tokenFile = tokenFileName;
                    return Optional.of(Files.toString(tokenFile, UTF_8));
                }
                catch (Exception e) {
                    throw new PrestoException(NOT_FOUND, "Failed to find/read file: " + tokenFileName, e);
                }
            }).orElse(Optional.empty());
}
 
Example 26  Project: presto  File: InMemoryTransactionManager.java
public synchronized void checkConnectorWrite(CatalogName catalogName)
{
    checkOpenTransaction();
    ConnectorTransactionMetadata transactionMetadata = connectorIdToMetadata.get(catalogName);
    checkArgument(transactionMetadata != null, "Cannot record write for connector not part of transaction");
    if (readOnly) {
        throw new PrestoException(READ_ONLY_VIOLATION, "Cannot execute write in a read-only transaction");
    }
    if (!writtenConnectorId.compareAndSet(null, catalogName) && !writtenConnectorId.get().equals(catalogName)) {
        throw new PrestoException(MULTI_CATALOG_WRITE_CONFLICT, "Multi-catalog writes not supported in a single transaction. Already wrote to catalog " + writtenConnectorId.get());
    }
    if (transactionMetadata.isSingleStatementWritesOnly() && !autoCommitContext) {
        throw new PrestoException(AUTOCOMMIT_WRITE_CONFLICT, "Catalog " + catalogName + " only supports writes using autocommit");
    }
}
 
Example 27  Project: presto  File: GeoFunctions.java
private static OGCGeometry geomFromBinary(Slice input)
{
    requireNonNull(input, "input is null");
    OGCGeometry geometry;
    try {
        geometry = OGCGeometry.fromBinary(input.toByteBuffer().slice());
    }
    catch (IllegalArgumentException | IndexOutOfBoundsException e) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Invalid WKB", e);
    }
    geometry.setSpatialReference(null);
    return geometry;
}
 
Example 28  Project: presto  File: PrometheusRecordCursor.java
@Override
public long getLong(int field)
{
    Type type = getType(field);
    if (type.equals(TIMESTAMP)) {
        return ((Timestamp) getFieldValue(field)).toInstant().toEpochMilli();
    }
    else {
        throw new PrestoException(NOT_SUPPORTED, "Unsupported type " + getType(field));
    }
}
 
Example 29  Project: presto  File: TinyintOperators.java
@ScalarOperator(MULTIPLY)
@SqlType(StandardTypes.TINYINT)
public static long multiply(@SqlType(StandardTypes.TINYINT) long left, @SqlType(StandardTypes.TINYINT) long right)
{
    try {
        return SignedBytes.checkedCast(left * right);
    }
    catch (IllegalArgumentException e) {
        throw new PrestoException(NUMERIC_VALUE_OUT_OF_RANGE, format("tinyint multiplication overflow: %s * %s", left, right), e);
    }
}
 
Example 30  Project: presto  File: GeoFunctions.java
@Description("Converts a Geometry object to a SphericalGeography object")
@ScalarFunction("to_spherical_geography")
@SqlType(SPHERICAL_GEOGRAPHY_TYPE_NAME)
public static Slice toSphericalGeography(@SqlType(GEOMETRY_TYPE_NAME) Slice input)
{
    // "every point in input is in range" <=> "the envelope of input is in range"
    Envelope envelope = deserializeEnvelope(input);
    if (!envelope.isEmpty()) {
        checkLatitude(envelope.getYMin());
        checkLatitude(envelope.getYMax());
        checkLongitude(envelope.getXMin());
        checkLongitude(envelope.getXMax());
    }
    OGCGeometry geometry = deserialize(input);
    if (geometry.is3D()) {
        throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Cannot convert 3D geometry to a spherical geography");
    }

    GeometryCursor cursor = geometry.getEsriGeometryCursor();
    while (true) {
        com.esri.core.geometry.Geometry subGeometry = cursor.next();
        if (subGeometry == null) {
            break;
        }

        if (!GEOMETRY_TYPES_FOR_SPHERICAL_GEOGRAPHY.contains(subGeometry.getType())) {
            throw new PrestoException(INVALID_FUNCTION_ARGUMENT, "Cannot convert geometry of this type to spherical geography: " + subGeometry.getType());
        }
    }

    return input;
}