@@ -1,1146 +0,0 @@
-package kr.co.iya.base.support.util;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Paths;
-import java.text.SimpleDateFormat;
-import java.time.ZoneId;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.threeten.bp.Duration;
-
-import com.google.api.gax.paging.Page;
-import com.google.cloud.RetryOption;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.BigQuery.DatasetListOption;
-import com.google.cloud.bigquery.BigQueryException;
-import com.google.cloud.bigquery.CsvOptions;
-import com.google.cloud.bigquery.Dataset;
-import com.google.cloud.bigquery.DatasetId;
-import com.google.cloud.bigquery.DatasetInfo;
-import com.google.cloud.bigquery.ExtractJobConfiguration;
-import com.google.cloud.bigquery.Job;
-import com.google.cloud.bigquery.JobId;
-import com.google.cloud.bigquery.JobInfo;
-import com.google.cloud.bigquery.JobInfo.WriteDisposition;
-import com.google.cloud.bigquery.LoadJobConfiguration;
-import com.google.cloud.bigquery.QueryJobConfiguration;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.StandardTableDefinition;
-import com.google.cloud.bigquery.Table;
-import com.google.cloud.bigquery.TableDefinition;
-import com.google.cloud.bigquery.TableId;
-import com.google.cloud.bigquery.TableInfo;
-import com.google.cloud.bigquery.TableResult;
-import com.google.cloud.spring.autoconfigure.core.GcpProperties;
-import com.google.cloud.storage.Blob;
-import com.google.cloud.storage.BlobId;
-import com.google.cloud.storage.BlobInfo;
-import com.google.cloud.storage.Bucket;
-import com.google.cloud.storage.Storage;
-
-import kr.co.iya.base.context.GcpContext;
-import kr.co.iya.base.context.GcpContext.BigQueryHandler;
-import kr.co.iya.base.context.GcpContext.BucketObjectInfo;
-import kr.co.iya.base.context.GcpContext.StorageHandler;
-import kr.co.iya.base.exception.SystemException;
-import kr.co.iya.base.support.aid.BeanAidPack.BeanAid;
-import kr.co.iya.base.support.util.SystemUtilPack.SystemUtil;
-import lombok.extern.slf4j.Slf4j;
-
-@Slf4j
-public final class GcpUtilPack {
-
-    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-    public interface GcpUtil {
-
-        public GcpContext getContext();
-
-        public StorageHandler getDefaultStorageHandler();
-        public BigQueryHandler getDefaultBigQueryHandler();
-
-        public StorageHandler getStorageHandler(String projectId);
-        public BigQueryHandler getBigQueryHandler(String projectId);
-    }
-
-    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
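-
-    // Illustrative usage sketch (assumes a BeanAid obtained from the application context;
-    // the project name below is hypothetical, not part of this file):
-    //
-    //   GcpUtil gcpUtil = GcpUtilPack.getGcpUtil(beanAid);
-    //   StorageHandler storageHandler = gcpUtil.getDefaultStorageHandler();
-    //   BigQueryHandler bigQueryHandler = gcpUtil.getBigQueryHandler("my-project");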
-
-    public static GcpUtil getGcpUtil(BeanAid beanAid) {
-
-        return new GcpUtil() {
-
-            private GcpContext context = null;
-            private Map<String,String> systemInfo = null;
-
-            @Override
-            public GcpContext getContext() {
-
-                if(this.context == null) {
-                    this.context = beanAid.getBean(GcpContext.class);
-                }
-
-                //initialize the context lazily with the default beans when it is not yet enabled
-                if(this.context != null && !this.context.isEnabled()) {
-
-                    BigQuery defaultBigQuery = beanAid.getBean(BigQuery.class);
-                    //log.debug(">> defaultBigQuery: {}",defaultBigQuery == null?null:defaultBigQuery.hashCode());
-
-                    Storage defaultStorage = beanAid.getBean(Storage.class);
-                    //log.debug(">> defaultStorage: {}",defaultStorage == null?null:defaultStorage.hashCode());
-
-                    this.context.initContext(beanAid.getBean(GcpProperties.class), defaultBigQuery, defaultStorage);
-
-                    this.getDefaultBigQueryHandler();
-                    this.getDefaultStorageHandler();
-                }
-
-                return this.context;
-            }
-
-            @Override
-            public StorageHandler getDefaultStorageHandler() {
-                return this.getStorageHandler(this.getContext().getDefaultProjectId());
-            }
-
-            @Override
-            public BigQueryHandler getDefaultBigQueryHandler() {
-                return this.getBigQueryHandler(this.getContext().getDefaultProjectId());
-            }
-
-            @Override
-            public BigQueryHandler getBigQueryHandler(String projectId) {
-                BigQueryHandler handler = this.getContext().getBigQueryHandler(projectId);
-                if(handler != null) {return handler;}
-
-                BigQuery bigQuery = this.getContext().getBigQuery(projectId);
-                if(bigQuery == null) {return null;}
-
-                this.getContext().setBigQueryHandler(projectId, this.buildBigQueryHandler(bigQuery));
-                return this.getBigQueryHandler(projectId);
-            }
-
-            @Override
-            public StorageHandler getStorageHandler(String projectId){
-                StorageHandler handler = this.getContext().getStorageHandler(projectId);
-                if(handler != null) {return handler;}
-
-                Storage storage = this.getContext().getStorage(projectId);
-                if(storage == null) {return null;}
-
-                this.getContext().setStorageHandler(projectId, this.buildStorageHandler(storage));
-                return this.getStorageHandler(projectId);
-            }
-
-            private BigQueryHandler buildBigQueryHandler(BigQuery bigQuery) {
-                return new BigQueryHandler() {
-
-                    @Override
-                    public BigQuery getBigQuery() {
-                        return bigQuery;
-                    }
-
-                    @Override
-                    public Map<String,Object> convert(Table table){
-                        if(table == null) {return null;}
-                        Map<String,Object> map = new HashMap<>();
-                        map.put("tableId", table.getTableId());
-                        map.put("etag", table.getEtag());
-                        map.put("generatedId", table.getGeneratedId());
-                        map.put("selfLink", table.getSelfLink());
-                        map.put("friendlyName", table.getFriendlyName());
-                        map.put("description", table.getDescription());
-                        map.put("expirationTime", table.getExpirationTime());
-                        map.put("creationTime", table.getCreationTime());
-                        map.put("lastModifiedTime", table.getLastModifiedTime());
-                        map.put("numBytes", table.getNumBytes());
-                        map.put("numLongTermBytes", table.getNumLongTermBytes());
-                        map.put("numTimeTravelPhysicalBytes", table.getNumTimeTravelPhysicalBytes());
-                        map.put("numTotalLogicalBytes", table.getNumTotalLogicalBytes());
-                        map.put("numActiveLogicalBytes", table.getNumActiveLogicalBytes());
-                        map.put("numLongTermLogicalBytes", table.getNumLongTermLogicalBytes());
-                        map.put("numTotalPhysicalBytes", table.getNumTotalPhysicalBytes());
-                        map.put("numActivePhysicalBytes", table.getNumActivePhysicalBytes());
-                        map.put("numLongTermPhysicalBytes", table.getNumLongTermPhysicalBytes());
-                        map.put("numRows", table.getNumRows());
-                        map.put("definition", table.getDefinition().toString());
-                        map.put("encryptionConfiguration", table.getEncryptionConfiguration());
-                        map.put("labels", table.getLabels());
-                        map.put("requirePartitionFilter", table.getRequirePartitionFilter());
-                        map.put("defaultCollation", table.getDefaultCollation());
-                        map.put("cloneDefinition", table.getCloneDefinition());
-                        map.put("tableConstraints", table.getTableConstraints());
-                        return map;
-                    }
-
-                    @Override
-                    public Map<String,Object> convert(Blob blob){
-                        if(blob == null) {return null;}
-                        Map<String,Object> map = new HashMap<>();
-                        map.put("systemInfo",getSystemInfo());
-                        map.put("bucket",blob.getBucket());
-                        map.put("path",blob.getName());
-                        map.put("name",extractFileName(blob.getName()));
-                        map.put("size",blob.getSize());
-                        map.put("directory",blob.isDirectory());
-                        map.put("createTime",blob.getCreateTimeOffsetDateTime() == null?null:blob.getCreateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());
-                        map.put("updateTime",blob.getUpdateTimeOffsetDateTime() == null?null:blob.getUpdateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());
-                        map.put("contentType",blob.getContentType());
-                        map.put("downloadStorePath","");
-                        map.put("downloadTime","");
-                        map.put("metadata",blob.getMetadata());
-                        return map;
-                    }
-
-                    @Override
-                    public Map<String,Object> convert(Dataset dataset) {
-                        if(dataset == null) {return null;}
-                        Map<String,Object> map = new HashMap<>();
-                        map.put("datasetId", dataset.getDatasetId());
-                        map.put("creationTime", dataset.getCreationTime());
-                        map.put("defaultTableLifetime", dataset.getDefaultTableLifetime());
-                        map.put("description", dataset.getDescription());
-                        map.put("etag", dataset.getEtag());
-                        map.put("friendlyName", dataset.getFriendlyName());
-                        map.put("generatedId", dataset.getGeneratedId());
-                        map.put("lastModified", dataset.getLastModified());
-                        map.put("location", dataset.getLocation());
-                        map.put("selfLink", dataset.getSelfLink());
-                        map.put("acl", dataset.getAcl());
-                        map.put("labels", dataset.getLabels());
-                        map.put("defaultEncryptionConfiguration", dataset.getDefaultEncryptionConfiguration());
-                        map.put("defaultPartitionExpirationMs", dataset.getDefaultPartitionExpirationMs());
-                        map.put("defaultCollation", dataset.getDefaultCollation());
-                        map.put("externalDatasetReference", dataset.getExternalDatasetReference());
-                        map.put("storageBillingModel", dataset.getStorageBillingModel());
-                        return map;
-                    }
-
-                    @Override
-                    public List<Map<String,Object>> convert(List<?> list){
-                        if(list == null) {return null;}
-                        List<Map<String,Object>> result = new ArrayList<>();
-                        list.forEach(i->{
-                            if(i instanceof Dataset){result.add(this.convert((Dataset)i));}
-                            if(i instanceof Table){result.add(this.convert((Table)i));}
-                            if(i instanceof Blob){result.add(this.convert((Blob)i));}
-                        });
-                        return result;
-                    }
-
-                    @Override
-                    public List<Dataset> listDatasets(String projectId) {
-                        Page<Dataset> datasets = bigQuery.listDatasets(projectId,DatasetListOption.all());
-                        List<Dataset> list = new ArrayList<>();
-                        datasets.iterateAll().forEach(list::add);
-                        return list;
-                    }
-
-                    @Override
-                    public Dataset getDataset(String projectId, String datasetName) {
-                        return bigQuery.getDataset(DatasetId.of(projectId,datasetName));
-                    }
-
-                    @Override
-                    public boolean isExistDataset(String projectId, String datasetName) {
-                        return this.getDataset(projectId,datasetName) != null;
-                    }
-
-                    @Override
-                    public Dataset createDataset(String projectId, String datasetName) {
-                        if(this.isExistDataset(projectId, datasetName)) {throw new SystemException("dataset("+datasetName+") already exists.");}
-                        return bigQuery.create(DatasetInfo.newBuilder(projectId,datasetName).build());
-                    }
-
-                    @Override
-                    public List<Table> listTables(String projectId, String datasetName) {
-                        if(!this.isExistDataset(projectId, datasetName)) {throw new SystemException("dataset("+datasetName+") is not found.");}
-                        Page<Table> tables = bigQuery.listTables(DatasetId.of(projectId, datasetName));
-                        List<Table> list = new ArrayList<>();
-                        tables.iterateAll().forEach(list::add);
-                        return list;
-                    }
-
-                    @Override
-                    public Table getTable(String projectId, String datasetName, String tableName) {
-                        return bigQuery.getTable(TableId.of(projectId, datasetName, tableName));
-                    }
-
-                    @Override
-                    public boolean isExistTable(String projectId, String datasetName, String tableName) {
-                        return this.getTable(projectId, datasetName, tableName) != null;
-                    }
-
-                    @Override
-                    public Table createTable(String projectId, String datasetName, String tableName, Schema schema) {
-                        if(this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") already exists.");}
-                        TableId tableId = TableId.of(projectId, datasetName, tableName);
-                        TableDefinition tableDefinition = StandardTableDefinition.of(schema == null?Schema.of():schema);
-                        return bigQuery.create(TableInfo.newBuilder(tableId, tableDefinition).build());
-                    }
-
-                    @Override
-                    public Table createTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-                        return this.createTableFromCsv(projectId, datasetName, tableName, null, bucketUri);
-                    }
-
-                    @Override
-                    public Table createTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri) {
-                        return this.createTableFromCsv(projectId, datasetName, tableName, schema, bucketUri, null);
-                    }
-
-                    @Override
-                    public Table createTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri, CsvOptions csvOptions) {
-                        return this._workTableFromCsv(projectId, datasetName, tableName, schema, bucketUri, csvOptions, null);
-                    }
-
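-                    // Illustrative usage sketch (hypothetical names; with no schema given the
-                    // load autodetects it and skips one header row by default):
-                    //
-                    //   Table created = bigQueryHandler.createTableFromCsv("my-project", "my_dataset",
-                    //           "person", "my-bucket/incoming/person.csv");
-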
-                    @Override
-                    public TableResult executeQuery(String projectId, String datasetName, String query, boolean isLegacySql) {
-                        TableResult result = null;
-                        try{
-                            QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
-                                    .setUseLegacySql(isLegacySql)
-                                    .build();
-
-                            JobId jobId = JobId.of(UUID.randomUUID().toString());
-                            Job queryJob = bigQuery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()).waitFor();
-
-                            if(queryJob == null) {
-                                throw new RuntimeException("Job no longer exists");
-                            }
-
-                            if(queryJob.getStatus().getError() != null) {
-                                throw new RuntimeException(queryJob.getStatus().getError().toString());
-                            }
-
-                            result = queryJob.getQueryResults();
-
-                        } catch (BigQueryException | InterruptedException e) {
-                            throw new SystemException(e.getMessage());
-                        }
-
-                        return result;
-                    }
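-
-                    // Illustrative usage sketch (hypothetical names; standard SQL):
-                    //
-                    //   TableResult rows = bigQueryHandler.executeQuery("my-project", "my_dataset",
-                    //           "SELECT name FROM `my-project.my_dataset.person`", false);
-                    //   rows.iterateAll().forEach(r -> log.debug("{}", r.get("name").getStringValue()));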
-
-                    @Override
-                    public Table createTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql) {
-                        if(this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") already exists.");}
-                        return this._workTableFromQuery(projectId, datasetName, tableName, WriteDisposition.WRITE_TRUNCATE, query, isLegacySql);
-                    }
-
-                    @Override
-                    public Table truncateTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-                        return this.truncateTableFromCsv(projectId, datasetName, tableName, bucketUri, null);
-                    }
-
-                    @Override
-                    public Table truncateTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri, CsvOptions csvOptions) {
-                        return this._workTableFromCsv(projectId, datasetName, tableName, null, bucketUri, csvOptions, WriteDisposition.WRITE_TRUNCATE);
-                    }
-
-                    @Override
-                    public Table truncateTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql) {
-                        if(!this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") does not exist.");}
-                        return this._workTableFromQuery(projectId, datasetName, tableName, WriteDisposition.WRITE_TRUNCATE, query, isLegacySql);
-                    }
-
-                    @Override
-                    public Table appendTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-                        return this.appendTableFromCsv(projectId, datasetName, tableName, bucketUri, null);
-                    }
-
-                    @Override
-                    public Table appendTableFromCsv(String projectId, String datasetName, String tableName, String bucketUri, CsvOptions csvOptions) {
-                        return this._workTableFromCsv(projectId, datasetName, tableName, null, bucketUri, csvOptions, WriteDisposition.WRITE_APPEND);
-                    }
-
-                    @Override
-                    public Table appendTableFromQuery(String projectId, String datasetName, String tableName, String query, boolean isLegacySql) {
-                        if(!this.isExistTable(projectId, datasetName, tableName)) {throw new SystemException("table("+tableName+") does not exist.");}
-                        return this._workTableFromQuery(projectId, datasetName, tableName, WriteDisposition.WRITE_APPEND, query, isLegacySql);
-                    }
-
-                    @Override
-                    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri) {
-                        return this.extractQueryToCsv(projectId, datasetName, query, isLegacySql, bucketUri, ",");
-                    }
-
-                    @Override
-                    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter) {
-                        return this.extractQueryToCsv(projectId, datasetName, query, isLegacySql, bucketUri, fieldDelimiter, true);
-                    }
-
-                    @Override
-                    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter, boolean printHeader) {
-                        return this.extractQueryToCsv(projectId, datasetName, query, isLegacySql, bucketUri, fieldDelimiter, printHeader, false);
-                    }
-
-                    @Override
-                    public List<Blob> extractQueryToCsv(String projectId, String datasetName, String query, boolean isLegacySql, String bucketUri, String fieldDelimiter, boolean printHeader, boolean isCompressed) {
-                        //materialize the query into a temporary table, export it, then drop the table
-                        Table table = null;
-                        List<Blob> list = null;
-                        try{
-                            String tableName = "tb_utcb"+new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
-                            table = this.createTableFromQuery(projectId, datasetName, tableName, query, isLegacySql);
-                            list = this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, fieldDelimiter, printHeader, isCompressed);
-                        } finally {
-                            if(table != null) {table.delete();}
-                        }
-                        return list;
-                    }
-
-                    @Override
-                    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri) {
-                        return this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, null);
-                    }
-
-                    @Override
-                    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter) {
-                        return this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, fieldDelimiter, true);
-                    }
-
-                    @Override
-                    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter, boolean printHeader) {
-                        //delegate to the full overload with compression disabled
-                        return this.extractTableToCsv(projectId, datasetName, tableName, bucketUri, fieldDelimiter, printHeader, false);
-                    }
-
-                    @Override
-                    public List<Blob> extractTableToCsv(String projectId, String datasetName, String tableName, String bucketUri, String fieldDelimiter, boolean printHeader, boolean isCompressed) {
-                        return this._workTableToBucket(projectId, datasetName, tableName, bucketUri, "CSV", fieldDelimiter, printHeader, isCompressed?"gzip":null);
-                    }
-
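-                    // Illustrative usage sketch (hypothetical names; large tables export as
-                    // multiple shards when the URI contains a "*"):
-                    //
-                    //   List<Blob> parts = bigQueryHandler.extractTableToCsv("my-project", "my_dataset",
-                    //           "person", "my-bucket/export/person-*.csv.gz", ",", true, true);
-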
-                    private Table _workTableFromQuery(String projectId, String datasetName, String tableName, WriteDisposition writeDisposition, String query, boolean isLegacySql) {
-                        try{
-                            TableId tableId = TableId.of(projectId, datasetName, tableName);
-                            log.debug(">> tableId:{}",tableId);
-
-                            //a null writeDisposition means create-only (WRITE_EMPTY: fail if the table already has data)
-                            QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
-                                    .setDestinationTable(tableId)
-                                    .setAllowLargeResults(true)
-                                    .setWriteDisposition(writeDisposition == null?WriteDisposition.WRITE_EMPTY:writeDisposition)
-                                    .setUseLegacySql(isLegacySql)
-                                    .build();
-
-                            TableResult result = bigQuery.query(queryConfig);
-                            log.debug(">> extract rows:{}",result.getTotalRows());
-
-                        } catch (BigQueryException | InterruptedException e) {
-                            throw new SystemException(e.getMessage());
-                        }
-
-                        return this.getTable(projectId, datasetName, tableName);
-                    }
-
-                    private Table _workTableFromCsv(String projectId, String datasetName, String tableName, Schema schema, String bucketUri, CsvOptions csvOptions, WriteDisposition writeDisposition) {
-                        try{
-                            TableId tableId = TableId.of(projectId, datasetName, tableName);
-                            log.debug(">> tableId:{}",tableId);
-
-                            String sourceUri = bucketUri.startsWith("gs://")?bucketUri:"gs://"+bucketUri;
-
-                            CsvOptions formatOptions = csvOptions == null?CsvOptions.newBuilder().setSkipLeadingRows(1).build():csvOptions;
-
-                            LoadJobConfiguration configuration = null;
-
-                            if(writeDisposition == null) {
-                                //create-only load: the schema is applied (or autodetected) and the table must be empty
-                                configuration = LoadJobConfiguration.newBuilder(tableId, sourceUri)
-                                        .setFormatOptions(formatOptions)
-                                        .setWriteDisposition(WriteDisposition.WRITE_EMPTY)
-                                        .setSchema(schema)
-                                        .setAutodetect(schema == null)
-                                        .build();
-                            }else {
-                                //truncate or append into an existing table, keeping its schema
-                                configuration = LoadJobConfiguration.newBuilder(tableId, sourceUri)
-                                        .setFormatOptions(formatOptions)
-                                        .setWriteDisposition(writeDisposition)
-                                        .build();
-                            }
-
-                            Job job = bigQuery.create(JobInfo.of(configuration))
-                                    .waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),RetryOption.totalTimeout(Duration.ofMinutes(60)));
-
-                            if (job == null) {
-                                log.debug(">> job not executed since it no longer exists.");
-                                return null;
-                            }
-                            if (job.getStatus().getError() != null) {
-                                log.debug(">> bigQuery was unable to load into the table due to an error:"+ job.getStatus().getError());
-                                return null;
-                            }
-                            log.debug(">> data(CSV) load successful");
-
-                        } catch (BigQueryException | InterruptedException e) {
-                            throw new SystemException(e.getMessage());
-                        }
-
-                        return this.getTable(projectId, datasetName, tableName);
-                    }
-
-                    private List<Blob> _workTableToBucket(String projectId, String datasetName, String tableName, String bucketUri, String dataFormat, String fieldDelimiter, Boolean printHeader, String compressed){
-                        try{
-                            TableId tableId = TableId.of(projectId, datasetName, tableName);
-                            String targetUri = bucketUri.startsWith("gs://")?bucketUri:"gs://"+bucketUri;
-
-                            ExtractJobConfiguration configuration = ExtractJobConfiguration.newBuilder(tableId, targetUri)
-                                    .setFieldDelimiter(fieldDelimiter == null?",":fieldDelimiter)
-                                    .setPrintHeader(printHeader)
-                                    .setCompression(compressed)
-                                    .setFormat(dataFormat)
-                                    .build();
-
-                            Job job = bigQuery.create(JobInfo.of(configuration))
-                                    .waitFor(RetryOption.initialRetryDelay(Duration.ofSeconds(1)),RetryOption.totalTimeout(Duration.ofMinutes(60)));
-
-                            if (job == null) {
-                                log.debug(">> job not executed since it no longer exists.");
-                                return null;
-                            }
-
-                            if (job.getStatus().getError() != null) {
-                                log.debug(">> bigQuery was unable to extract due to an error: \n" + job.getStatus().getError());
-                                return null;
-                            }
-                            log.debug(">> Table export successful");
-
-                        } catch (BigQueryException | InterruptedException e) {
-                            throw new SystemException(e.getMessage());
-                        }
-
-                        //list the exported objects under the target bucket path
-                        if(bucketUri.startsWith("gs://")) {bucketUri = bucketUri.substring("gs://".length());}
-                        String bucket = bucketUri.indexOf("/")!=-1?bucketUri.substring(0,bucketUri.indexOf("/")):bucketUri;
-                        String remotePath = bucketUri.indexOf("/")!=-1?bucketUri.substring(bucketUri.indexOf("/")+1):"";
-
-                        return getDefaultStorageHandler().listObjects(bucket,remotePath);
-                    }
-
-                };
-            }
-
-            private StorageHandler buildStorageHandler(Storage storage) {
-                return new StorageHandler() {
-
-                    @Override
-                    public Storage getStorage() {
-                        return storage;
-                    }
-
-                    @Override
-                    public Map<String,Object> convert(Blob blob){
-                        if(blob == null) {return null;}
-                        Map<String,Object> map = new HashMap<>();
-                        map.put("systemInfo",getSystemInfo());
-                        map.put("bucket",blob.getBucket());
-                        map.put("path",blob.getName());
-                        map.put("name",extractFileName(blob.getName()));
-                        map.put("size",blob.getSize());
-                        map.put("directory",blob.isDirectory());
-                        map.put("createTime",blob.getCreateTimeOffsetDateTime() == null?null:blob.getCreateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());
-                        map.put("updateTime",blob.getUpdateTimeOffsetDateTime() == null?null:blob.getUpdateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()).toString());
-                        map.put("contentType",blob.getContentType());
-                        map.put("downloadStorePath","");
-                        map.put("downloadTime","");
-                        map.put("metadata",blob.getMetadata());
-                        return map;
-                    }
-
-                    @Override
-                    public List<Map<String,Object>> convert(List<Blob> list){
-                        if(list == null) {return null;}
-                        List<Map<String,Object>> result = new ArrayList<>();
-                        list.forEach(i->result.add(this.convert(i)));
-                        return result;
-                    }
-
-                    @Override
-                    public boolean isExistBucket(String bucket) {
-                        return this.getBucket(bucket) != null;
-                    }
-
-                    @Override
-                    public boolean isExistObject(String bucket, String remotePath) {
-                        return this.getObject(bucket, remotePath) != null;
-                    }
-
-                    @Override
-                    public boolean isDirectory(String bucket, String remotePath) {
-                        //a path with no concrete object behind it is treated as a directory (prefix)
-                        Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));
-                        return blob == null || blob.isDirectory();
-                    }
-
-                    @Override
-                    public Bucket getBucket(String bucket) {
-                        return storage.get(bucket);
-                    }
-
-                    @Override
-                    public Blob getObject(String bucket, String remotePath) {
-                        return storage.get(bucket, remotePath);
-                    }
-
-                    @Override
-                    public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetRemotePath) {
-                        return this.move(sourceBucket, sourceRemotePath, sourceBucket, targetRemotePath);
-                    }
-
-                    @Override
-                    public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetRemotePath, boolean isOverWrite){
-                        return this.move(sourceBucket, sourceRemotePath, sourceBucket, targetRemotePath, isOverWrite);
-                    }
-
-                    @Override
-                    public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath) {
-                        return this._move(sourceBucket, sourceRemotePath, targetBucket, targetRemotePath, true, 0);
-                    }
-
-                    @Override
-                    public List<Blob> move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath, boolean isOverWrite){
-                        long startTime = System.nanoTime();
-                        List<Blob> list = this._move(sourceBucket, sourceRemotePath, targetBucket, targetRemotePath, isOverWrite, 0);
-                        log.debug(">> move.time:{}",TimeUnit.NANOSECONDS.toMillis(System.nanoTime()-startTime));
-                        return list;
-                    }
-
-                    @Override
-                    public List<String> listBuckets(){
-                        List<String> list = new ArrayList<>();
-                        Page<Bucket> buckets = storage.list();
-                        buckets.iterateAll().forEach(bucket->list.add(bucket.getName()));
-                        return list;
-                    }
-
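-                    // Illustrative usage sketch (hypothetical names; moves a folder within one
-                    // bucket, failing if any target object with the same name already exists):
-                    //
-                    //   List<Blob> moved = storageHandler.move("my-bucket", "incoming/", "archive/", false);
-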
-                    @Override
-                    public List<Blob> listObjects(String bucket) {
-                        return this.listObjects(bucket,"");
-                    }
-
-                    @Override
-                    public List<Blob> listObjects(String bucket, String remotePath) {
-                        remotePath = trimSeparator(remotePath);
-                        if(remotePath.indexOf("*")!=-1) {
-                            //a wildcard lists everything under the prefix before the "*"
-                            remotePath = remotePath.substring(0,remotePath.indexOf("*"));
-                        }else {
-                            //a concrete object is returned as a single-element list
-                            Blob object = storage.get(BlobId.of(bucket,remotePath));
-                            if(object != null && !object.isDirectory()) {
-                                List<Blob> result = new ArrayList<>();
-                                result.add(object);
-                                return result;
-                            }
-                        }
-                        remotePath = trimSeparator(remotePath+"/");
-
-                        Page<Blob> blobs = storage.list(bucket,Storage.BlobListOption.currentDirectory(),Storage.BlobListOption.prefix(remotePath));
-                        if(blobs == null) {return null;}
-
-                        List<Blob> result = new ArrayList<>();
-                        blobs.iterateAll().forEach(result::add);
-                        return result;
-                    }
-
-                    @Override
-                    public List<Blob> upload(String bucket, String storePath, String remotePath) throws IOException{
-                        return this.upload(bucket, storePath, remotePath, UploadType.stream, true);
-                    }
-
-                    @Override
-                    public List<Blob> upload(String bucket, String storePath, String remotePath, UploadType uploadType) throws IOException{
-                        return this.upload(bucket, storePath, remotePath, uploadType, true);
-                    }
-
-                    @Override
-                    public List<Blob> upload(String bucket, String storePath, String remotePath, boolean isOverWrite) throws IOException{
-                        return this.upload(bucket, storePath, remotePath, UploadType.stream, isOverWrite);
-                    }
-
-                    @Override
-                    public List<Blob> upload(String bucket, String storePath, String remotePath, UploadType uploadType, boolean isOverWrite) throws IOException{
-                        long startTime = System.nanoTime();
-                        List<Blob> list = this._upload(bucket, storePath, remotePath, uploadType, isOverWrite);
-                        log.debug(">> upload.time:{}",TimeUnit.NANOSECONDS.toMillis(System.nanoTime()-startTime));
-                        return list;
-                    }
-
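-                    // Illustrative usage sketch (hypothetical local path and bucket; UploadType.stream
-                    // streams the file, UploadType.buffer hands the path to the client library):
-                    //
-                    //   List<Blob> uploaded = storageHandler.upload("my-bucket", "/tmp/report.csv",
-                    //           "reports/", UploadType.stream, false);
-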
-                    @Override
-                    public BucketObjectInfo download(String bucket, String remotePath, OutputStream outputStream) {
-                        Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));
-                        blob.downloadTo(outputStream);
-                        return this._convert(blob,null,System.currentTimeMillis());
-                    }
-
-                    @Override
-                    public BucketObjectInfo download(String bucket, String remotePath, File file) {
-                        Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));
-                        String downloadStorePath = file.getPath();
-                        if(file.exists()) {file.delete();}
-                        blob.downloadTo(Paths.get(downloadStorePath));
-                        return this._convert(blob,downloadStorePath,System.currentTimeMillis());
-                    }
-
-                    @Override
-                    public List<BucketObjectInfo> download(String bucket, String remotePath, String storePath) {
-                        return this.download(bucket, remotePath, storePath, true);
-                    }
-
-                    @Override
-                    public List<BucketObjectInfo> download(String bucket, String remotePath, String storePath, boolean isOverWrite) {
-                        long startTime = System.nanoTime();
-                        List<BucketObjectInfo> list = this._download(bucket, remotePath, storePath, isOverWrite);
-                        log.debug(">> download.time:{}",TimeUnit.NANOSECONDS.toMillis(System.nanoTime()-startTime));
-                        return list;
-                    }
-
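-                    // Illustrative usage sketch (hypothetical names; mirrors a bucket folder locally):
-                    //
-                    //   List<BucketObjectInfo> fetched = storageHandler.download("my-bucket",
-                    //           "reports/2024/", "/tmp/reports", true);
-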
-                    @Override
-                    public void delete(String bucket, String remotePath) {
-                        this._delete(bucket, remotePath);
-                    }
-
-                    @Override
-                    public Blob compose(String bucket, List<String> sources, String targetPath) {
-                        return this._compose(bucket, sources, targetPath);
-                    }
-
-                    private BucketObjectInfo _convert(Blob blob, String downloadStorePath, long downloadTime) {
-                        BucketObjectInfo info = new BucketObjectInfo();
-                        info.setSystemInfo(getSystemInfo());
-                        info.setBucket(blob.getBucket());
-                        info.setPath(blob.getName());
-                        info.setName(extractFileName(blob.getName()));
-                        info.setSize(blob.getSize());
-                        info.setDirectory(blob.isDirectory());
-                        info.setCreateTime(blob.getCreateTimeOffsetDateTime() == null?null:blob.getCreateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()));
-                        info.setUpdateTime(blob.getUpdateTimeOffsetDateTime() == null?null:blob.getUpdateTimeOffsetDateTime().atZoneSameInstant(ZoneId.systemDefault()));
-                        info.setContentType(blob.getContentType());
-                        info.setDownloadStorePath(downloadStorePath);
-                        info.setDownloadTime(downloadTime);
-                        info.setMetadata(blob.getMetadata());
-                        return info;
-                    }
-
-                    private void _delete(String bucket, String remotePath) {
-                        Blob blob = storage.get(bucket, remotePath);
-                        if(blob != null) {
-                            storage.delete(bucket, remotePath, Storage.BlobSourceOption.generationMatch(blob.getGeneration()));
-                        }else {
-                            //no concrete object: treat the path as a prefix and delete everything under it
-                            Page<Blob> page = storage.list(bucket,Storage.BlobListOption.prefix(remotePath));
-                            for(Blob item : page.iterateAll()){
-                                if(item.isDirectory()) {continue;}
-                                this.delete(bucket,item.getName());
-                            }
-                        }
-                    }
-
-                    private Blob _compose(String bucket, List<String> sources, String targetPath) {
-                        if(sources == null || sources.isEmpty()) {throw new SystemException("sources is null or empty.");}
-
-                        //GCS compose accepts at most 32 sources per request, so larger lists are composed in two steps
-                        if(sources.size()>32) {
-                            String fileName = extractFileName(targetPath);
-                            String subPath = targetPath.replace(fileName,"")+System.nanoTime()+fileName;
-
-                            Blob composeBlob = null;
-                            Blob subBlob = null;
-                            try{
-                                subBlob = this._compose(bucket, sources.subList(0,32), subPath);
-
-                                //prepend the intermediate object to the remaining sources so none are lost
-                                List<String> remainder = new ArrayList<>();
-                                remainder.add(subBlob.getName());
-                                remainder.addAll(sources.subList(32,sources.size()));
-
-                                composeBlob = this._compose(bucket, remainder, targetPath);
-
-                            } catch(Exception e) {
-                                throw new SystemException(e.getMessage());
-                            } finally {
-                                if(subBlob != null) {subBlob.delete();}
-                            }
-
-                            return composeBlob;
-                        }
-
-                        Storage.BlobTargetOption precondition;
-                        Blob target = storage.get(bucket, targetPath);
-                        if (target == null) {
-                            precondition = Storage.BlobTargetOption.doesNotExist();
-                        } else {
-                            precondition = Storage.BlobTargetOption.generationMatch(target.getGeneration());
-                        }
-
-                        Storage.ComposeRequest composeRequest = Storage.ComposeRequest.newBuilder()
-                                .addSource(sources)
-                                .setTarget(BlobInfo.newBuilder(bucket, targetPath).build())
-                                .setTargetOptions(precondition)
-                                .build();
-
-                        return storage.compose(composeRequest);
-                    }
-
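-                    // Illustrative usage sketch (hypothetical names; the 32-source limit is handled internally):
-                    //
-                    //   Blob merged = storageHandler.compose("my-bucket",
-                    //           Arrays.asList("export/part-1.csv","export/part-2.csv"), "export/merged.csv");
-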
-                    private List<Blob> _move(String sourceBucket, String sourceRemotePath, String targetBucket, String targetRemotePath, boolean isOverWrite, int depth) {
-                        sourceRemotePath = trimSeparator(sourceRemotePath);
-                        targetRemotePath = trimSeparator(targetRemotePath);
-
-                        //check for same bucket and same path
-                        if(sourceBucket.equals(targetBucket) && sourceRemotePath.equals(targetRemotePath)) {
-                            log.debug(">> [move] same bucket, same path");
-                            return null;
-                        }
-
-                        //case: the source remote path is a file (object)
-                        Blob blob = storage.get(BlobId.of(sourceBucket, sourceRemotePath));
-                        log.debug(">> [move] blob({}): {}",sourceRemotePath,blob != null?"object":"directory");
-
-                        if(blob != null && !blob.isDirectory()) {
-
-                            //when not overwriting, check whether the target already exists
-                            if(!isOverWrite && this.isExistObject(targetBucket, targetRemotePath)){
-                                throw new SystemException("A file with the same name already exists at target remote path ("+targetRemotePath+") in bucket ("+targetBucket+").");
-                            }
-
-                            //if the target remote path ends with "/", treat it as a folder and append the source file name;
-                            //if it is an existing directory, build the object name under that directory
-                            boolean isEnable=false;
-                            if(!isEnable && targetRemotePath.endsWith("/")) {isEnable=true;}
-                            if(!isEnable && (this.isExistObject(targetBucket, targetRemotePath) && this.isDirectory(targetBucket, targetRemotePath))) {isEnable=true;}
-                            if(isEnable) {targetRemotePath = trimSeparator(targetRemotePath+"/"+extractFileName(sourceRemotePath));}
-
-                            BlobId source = BlobId.of(sourceBucket, sourceRemotePath);
-                            BlobId target = BlobId.of(targetBucket, targetRemotePath);
-
-                            Storage.BlobTargetOption precondition;
-                            blob = storage.get(targetBucket, targetRemotePath);
-                            if(blob == null){
-                                precondition = Storage.BlobTargetOption.doesNotExist();
-                            }else{
-                                precondition = Storage.BlobTargetOption.generationMatch(blob.getGeneration());
-                            }
-
-                            storage.copy(Storage.CopyRequest.newBuilder().setSource(source).setTarget(target, precondition).build());
-                            blob = storage.get(target);
-                            //a top-level file move deletes its source immediately; folder moves clean up afterwards
-                            if(depth == 0) {
-                                storage.get(source).delete();
-                            }
-
-                            List<Blob> list = new ArrayList<>();
-                            list.add(blob);
-
-                            return list;
-                        }
-
-                        //reject moving a source folder onto a target file
-                        if(!this.isDirectory(targetBucket, targetRemotePath)) {
-                            throw new SystemException("Cannot move folder ("+sourceRemotePath+") onto file ("+targetRemotePath+").");
-                        }
-
-                        //check whether the source remote path (folder) is empty
-                        Page<Blob> sourcePage = storage.list(sourceBucket,Storage.BlobListOption.prefix(sourceRemotePath));
-                        if(!sourcePage.iterateAll().iterator().hasNext()) {
-                            log.debug(">> [move] sourceRemotePath({}) is empty.",sourceRemotePath);
-                            return null;
-                        }
-
-                        //move into the target remote path (folder)
-                        List<Blob> list = new ArrayList<>();
-                        try{
-                            //when not overwriting, fail fast if any source object collides with a target object
-                            if(!isOverWrite && depth == 0) {
-                                Page<Blob> targetPage = storage.list(targetBucket,Storage.BlobListOption.prefix(targetRemotePath));
-                                if(targetPage.iterateAll().iterator().hasNext()) {
-                                    final Set<String> checkSet = new HashSet<>();
-                                    final String _sPath = sourceRemotePath;
-                                    final String _tPath = targetRemotePath;
-                                    targetPage.iterateAll().forEach(t->{
-                                        checkSet.add(t.getName().replace(_tPath,_sPath));
-                                    });
-                                    sourcePage.iterateAll().forEach(s->{
-                                        if(checkSet.contains(s.getName())) {
-                                            throw new SystemException("Source remote path ("+s.getName()+") and target remote path ("+s.getName().replace(_sPath,_tPath)+") have the same file name.");
-                                        }
-                                    });
-                                }
-                            }
-
-                            for(Blob item : sourcePage.iterateAll()){
-                                String _sourceRemotePath = item.getName();
-                                String _targetRemotePath = item.getName().replace(sourceRemotePath,targetRemotePath);
-                                depth = 1;
-                                List<Blob> _list = this._move(sourceBucket, _sourceRemotePath, targetBucket, _targetRemotePath, isOverWrite, depth);
-                                if(_list != null) {list.addAll(_list);}
-                            }
-                        }catch(Exception e){
-                            //on error, delete any files already moved
-                            if(list != null && !list.isEmpty()) {
-                                list.forEach(i->{
-                                    log.debug(">> [move] exception occurred, deleting moved file:{}",i.getName());
-                                    this.delete(targetBucket,i.getName());
-                                });
-                            }
-                            throw e;
-                        }
-
-                        //delete the source objects once the folder move has completed
-                        if(depth==1) {
-                            for(Blob item : sourcePage.iterateAll()){
-                                if(item.isDirectory()) {continue;}
-                                this.delete(sourceBucket,item.getName());
-                            }
-                        }
-
-                        return list;
-                    }
-
-                    private List<Blob> _upload(String bucket, String storePath, String remotePath, UploadType uploadType, boolean isOverWrite) throws IOException{
-                        if(StringUtils.isBlank(storePath)) {throw new SystemException("storePath("+storePath+") is empty.");}
-
-                        //check that the upload source exists
-                        File source = new File(FilenameUtils.normalize(storePath));
-                        if(!source.exists()) {log.debug(">> [upload] storePath({}) does not exist.",storePath); return null;}
-
-                        List<Blob> list = new ArrayList<>();
-
-                        //case: the upload source is a file
-                        if(source.isFile()) {
-                            String _remotePath = trimSeparator(remotePath);
-                            if(_remotePath.equals("")) {_remotePath=source.getName();}
-
-                            //if the remote path ends with "/", treat it as a folder and append the file name;
-                            //if it is an existing directory, build the object name under that directory
-                            boolean isEnable=false;
-                            if(!isEnable && _remotePath.endsWith("/")) {isEnable=true;}
-                            if(!isEnable && (this.isExistObject(bucket, _remotePath) && this.isDirectory(bucket, _remotePath))) {isEnable=true;}
-                            if(isEnable) {_remotePath = trimSeparator(_remotePath+"/"+source.getName());}
-
-                            //when not overwriting, check whether the target already exists
-                            if(!isOverWrite && this.isExistObject(bucket, _remotePath)){
-                                throw new SystemException("A file with the same name already exists at remote path ("+_remotePath+") in bucket ("+bucket+").");
-                            }
-
-                            //build the blobInfo
-                            log.debug(">> [upload] remotePath: {}",_remotePath);
-                            BlobId blobId = BlobId.of(bucket, _remotePath);
-                            BlobInfo blobInfo = BlobInfo.newBuilder(blobId).build();
-
-                            //build the precondition checked when createFrom runs
-                            Storage.BlobWriteOption precondition;
-                            Blob blob = storage.get(bucket, _remotePath);
-                            if(blob == null){
-                                //the request fails if the object is created before the request runs
-                                precondition = Storage.BlobWriteOption.doesNotExist();
-                            } else {
-                                //the request fails if the existing object's generation changes before the request
-                                //runs (guards against concurrent changes from another process or thread)
-                                precondition = Storage.BlobWriteOption.generationMatch(blob.getGeneration());
-                            }
-
-                            if(uploadType.equals(UploadType.buffer)) {
-                                list.add(storage.createFrom(blobInfo, Paths.get(storePath), precondition));
-                            }
-
-                            if(uploadType.equals(UploadType.stream)) {
-                                try(FileInputStream inputStream = new FileInputStream(source)) {
-                                    list.add(storage.createFrom(blobInfo, inputStream, precondition));
-                                }
-                            }
-                        }
-
-                        //case: the upload source is a folder
-                        if(source.isDirectory()) {
-
-                            if(!this.isDirectory(bucket, remotePath)) {
-                                throw new SystemException("Cannot upload folder ("+storePath+") onto file ("+remotePath+").");
-                            }
-
-                            String _storePath = trimSeparator("/"+storePath);
-                            String _basePath = trimSeparator(remotePath);
-                            String _remotePath = "";
-
-                            for(File f : source.listFiles()) {
-                                String _sourcePath = trimSeparator("/"+f.getPath());
-                                if(f.isFile()) {_remotePath = _basePath + "/" + f.getName();}
-                                if(f.isDirectory()) {_remotePath = _basePath + "/" + _sourcePath.replace(_storePath,"");}
-                                _remotePath = trimSeparator(_remotePath);
-
-                                if(!_remotePath.equals("")) {
-                                    try{
-                                        List<Blob> _list = this._upload(bucket, f.getPath(), _remotePath, uploadType, isOverWrite);
-                                        if(_list != null) {list.addAll(_list);}
-                                    }catch(Exception e) {
-                                        //on error, delete any files already uploaded
-                                        if(list != null && !list.isEmpty()) {
-                                            list.forEach(i->{
-                                                log.debug(">> [upload] exception occurred, deleting uploaded file:{}",i.getName());
-                                                delete(bucket,i.getName());
-                                            });
-                                        }
-                                        throw e;
-                                    }
-                                }
-                            }
-                        }
-
-                        return list;
-                    }
-
-                    private List<BucketObjectInfo> _download(String bucket, String remotePath, String storePath, boolean isOverWrite) {
-
-                        //if the remote path is a file (object), download it directly
-                        Blob blob = storage.get(BlobId.of(bucket, trimSeparator(remotePath)));
-                        log.debug(">> [download] blob({}): {}",remotePath,blob != null?"object":"directory");
-                        if(blob != null && !blob.isDirectory()) {
-
-                            String downloadStorePath = storePath;
-
-                            //ensure the parent directories exist, then build the download path
-                            File target = new File(FilenameUtils.normalize(downloadStorePath));
-                            if(!target.exists()) {target.mkdirs();target.delete();}
-                            if(target.isDirectory()) {
-                                downloadStorePath = downloadStorePath + File.separator + extractFileName(blob.getName());
-                                downloadStorePath = FilenameUtils.normalize(downloadStorePath);
-                            }
-
-                            //when not overwriting, check whether the file already exists
-                            if(!isOverWrite && (new File(FilenameUtils.normalize(downloadStorePath))).exists()) {
-                                throw new SystemException("A file already exists at download path ("+downloadStorePath+"). (isOverWrite:"+isOverWrite+")");
-                            }
-
-                            //perform the download
-                            blob.downloadTo(Paths.get(downloadStorePath));
-
-                            List<BucketObjectInfo> list = new ArrayList<>();
-                            list.add(this._convert(blob,downloadStorePath,System.currentTimeMillis()));
-
-                            return list;
-                        }
-
-                        //check whether the remote path (folder) is empty
-                        String prefix = trimSeparator(remotePath);
-                        Page<Blob> page = storage.list(bucket,Storage.BlobListOption.prefix(prefix));
-                        if(!page.iterateAll().iterator().hasNext()) {
-                            log.debug(">> [download] remotePath({}) is empty.",remotePath);
-                            return null;
-                        }
-
-                        //the store path must not be an existing file when the remote path is a folder
-                        File file = new File(FilenameUtils.normalize(storePath));
-                        if(file.exists() && file.isFile()) {
-                            throw new SystemException("Store path ("+storePath+") is a file. An existing file path cannot be used when the remote path ("+remotePath+") is a folder.");
-                        }
-
-                        //if the remote path is a folder, download every object under it
-                        List<BucketObjectInfo> list = new ArrayList<>();
-                        try{
-                            file.mkdirs();
-                            String baseStorePath = file.getPath();
-
-                            for(Blob item : page.iterateAll()) {
-                                if(item.isDirectory()) {continue;}
-
-                                //names ending with a slash(/) are directory placeholders and are skipped
-                                String appendpath = item.getName().replace(prefix,"");
-                                if(appendpath.endsWith("/")) {continue;}
-
-                                //build the download store path
-                                String downloadStorePath = FilenameUtils.normalize(baseStorePath+"/"+appendpath);
-                                log.debug(">> [download] downloadStorePath:{}",downloadStorePath);
-
-                                //create the directories for the download store path
-                                File _dirPath = new File(FilenameUtils.getFullPath(downloadStorePath));
-                                _dirPath.mkdirs();
-
-                                //when not overwriting, check whether the file already exists
-                                if(!isOverWrite && (new File(downloadStorePath)).exists()) {
-                                    throw new SystemException("A file with the same name already exists at store path ("+downloadStorePath+"). (isOverWrite:"+isOverWrite+")");
-                                }
-
-                                //perform the download
-                                blob = storage.get(BlobId.of(bucket, item.getName()));
-                                blob.downloadTo(Paths.get(downloadStorePath));
-
-                                list.add(this._convert(blob,downloadStorePath,System.currentTimeMillis()));
-                            }
-
-                        }catch(Exception e){
-                            //on error, delete any files already downloaded
-                            if(list != null && !list.isEmpty()) {
-                                list.forEach(i->{
-                                    log.debug(">> [download] exception occurred, deleting downloaded file:{}",i.getName());
-                                    (new File(i.getDownloadStorePath())).delete();
-                                });
-                            }
-                            throw e;
-                        }
-
-                        return list;
-                    }
-
-                };
-            }
-
-            private String extractFileName(String path) {
-                path = path.trim();
-                String name = path.substring(path.lastIndexOf("\\")+1);
-                if(path.indexOf("/")>-1) {name = path.substring(path.lastIndexOf("/")+1);}
-                return name;
-            }
-
-            private String trimSeparator(String path) {
-                path = path.trim();
-                //normalize any Windows-style separators to "/"
-                if(path.indexOf("\\") != -1) {path = path.replace("\\","/");}
-                if(path.indexOf("/") == 0) {path = this.trimSeparator(path.substring(1));}
-                while(path.indexOf("//")>0) {path = path.replace("//","/");}
-                return path;
-            }
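-
-            // Behavior sketch: trimSeparator("\\tmp//data/") -> "tmp/data/";
-            // leading separators are stripped and doubled slashes collapsed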
-
-            private Map<String,String> getSystemInfo() {
-                if(this.systemInfo == null) {
-                    SystemUtil systemUtil = beanAid.getBean(SystemUtil.class);
-                    this.systemInfo = systemUtil.getSystemInfo();
-                }
-                return this.systemInfo;
-            }
-
-        };
-    }
-
-    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-}