Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 24 additions & 13 deletions engine/src/main/java/com/arcadedb/database/DocumentValidator.java
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,9 @@ public static void validateField(final MutableDocument document, final Property
if (p.getRegexp() != null)
// REGEXP
if (!(fieldValue.toString()).matches(p.getRegexp()))
throwValidationException(p, "does not match the regular expression '" + p.getRegexp() + "'. Field value is: " + fieldValue + ", record: " + document);
throwValidationException(p,
"does not match the regular expression '" + p.getRegexp() + "'. Field value is: " + fieldValue + ", record: "
+ document);

final Type propertyType = p.getType();

Expand All @@ -67,7 +69,9 @@ public static void validateField(final MutableDocument document, final Property
final RID rid = ((Identifiable) fieldValue).getIdentity();
final DocumentType embSchemaType = document.getDatabase().getSchema().getTypeByBucketId(rid.getBucketId());
if (!embSchemaType.instanceOf(ofType))
throwValidationException(p, "has been declared as LINK of '" + ofType + "' but a link to type '" + embSchemaType + "' is used. Value: " + fieldValue);
throwValidationException(p,
"has been declared as LINK of '" + ofType + "' but a link to type '" + embSchemaType + "' is used. Value: "
+ fieldValue);
}
}
break;
Expand All @@ -80,7 +84,8 @@ public static void validateField(final MutableDocument document, final Property
final DocumentType embSchemaType = ((EmbeddedDocument) fieldValue).getType();
if (!embSchemaType.instanceOf(ofType))
throwValidationException(p,
"has been declared as EMBEDDED of '" + ofType + "' but a document of type '" + embSchemaType + "' is used. Value: " + fieldValue);
"has been declared as EMBEDDED of '" + ofType + "' but a document of type '" + embSchemaType
+ "' is used. Value: " + fieldValue);
}
if (fieldValue instanceof MutableEmbeddedDocument)
((MutableEmbeddedDocument) fieldValue).validate();
Expand All @@ -98,17 +103,19 @@ public static void validateField(final MutableDocument document, final Property
if (embType != null) {
if (Type.getTypeByValue(item) != embType)
throwValidationException(p,
"has been declared as LIST of '" + ofType + "' but a value of type '" + Type.getTypeByValue(item) + "' is used. Value: " + fieldValue);
"has been declared as LIST of '" + ofType + "' but a value of type '" + Type.getTypeByValue(item)
+ "' is used. Value: " + fieldValue);
} else if (item instanceof EmbeddedDocument) {
if (!((EmbeddedDocument) item).getType().instanceOf(ofType))
throwValidationException(p,
"has been declared as LIST of '" + ofType + "' but an embedded document of type '" + embType + "' is used. Value: " + fieldValue);
throwValidationException(p, "has been declared as LIST of '" + ofType + "' but an embedded document of type '"
+ ((EmbeddedDocument) item).getType().getName() + "' is used. Value: " + fieldValue);
} else if (item instanceof Identifiable) {
final RID rid = ((Identifiable) item).getIdentity();
final DocumentType embSchemaType = document.getDatabase().getSchema().getTypeByBucketId(rid.getBucketId());
if (!embSchemaType.instanceOf(ofType))
throwValidationException(p,
"has been declared as LIST of '" + ofType + "' but a link to type '" + embSchemaType + "' is used. Value: " + fieldValue);
"has been declared as LIST of '" + ofType + "' but a link to type '" + embSchemaType + "' is used. Value: "
+ fieldValue);
}
}

Expand All @@ -129,18 +136,20 @@ public static void validateField(final MutableDocument document, final Property
if (embType != null) {
if (Type.getTypeByValue(item) != embType)
throwValidationException(p,
"has been declared as MAP of <String,'" + ofType + "'> but a value of type '" + Type.getTypeByValue(item) + "' is used. Value: "
+ fieldValue);
"has been declared as MAP of <String,'" + ofType + "'> but a value of type '" + Type.getTypeByValue(item)
+ "' is used. Value: " + fieldValue);
} else if (item instanceof EmbeddedDocument) {
if (!((EmbeddedDocument) item).getType().instanceOf(ofType))
throwValidationException(p,
"has been declared as MAP of <String,'" + ofType + "'> but an embedded document of type '"
+ ((EmbeddedDocument) item).getType().getName() + "' is used. Value: " + fieldValue);
} else if (item instanceof Identifiable) {
final RID rid = ((Identifiable) item).getIdentity();
final DocumentType embSchemaType = document.getDatabase().getSchema().getTypeByBucketId(rid.getBucketId());
if (!embSchemaType.instanceOf(ofType))
throwValidationException(p,
"has been declared as MAP of <String,'" + ofType + "'> but a link to type '" + embSchemaType
+ "' is used. Value: " + fieldValue);
}
}

Expand Down Expand Up @@ -220,7 +229,8 @@ public static void validateField(final MutableDocument document, final Property
final Date fieldValueAsDate = (Date) Type.convert(database, fieldValue, Date.class);

if (fieldValueAsDate.compareTo(minAsDate) < 0)
throwValidationException(p, "contains the date " + fieldValue + " which precedes the first acceptable date (" + min + ")");
throwValidationException(p,
"contains the date " + fieldValue + " which precedes the first acceptable date (" + min + ")");
break;
}

Expand Down Expand Up @@ -321,7 +331,8 @@ public static void validateField(final MutableDocument document, final Property
final Date fieldValueAsDate = (Date) Type.convert(database, fieldValue, Date.class);

if (fieldValueAsDate.compareTo(maxAsDate) > 0)
throwValidationException(p, "contains the date " + fieldValue + " which is after the last acceptable date (" + max + ")");
throwValidationException(p,
"contains the date " + fieldValue + " which is after the last acceptable date (" + max + ")");
break;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ public class CheckClusterTypeStep extends AbstractExecutionStep {
final String targetType;
boolean found = false;

public CheckClusterTypeStep(final String targetBucketName, final String typez, final CommandContext context, final boolean profilingEnabled) {
public CheckClusterTypeStep(final String targetBucketName, final String typez, final CommandContext context,
final boolean profilingEnabled) {
super(context, profilingEnabled);
this.bucketName = targetBucketName;
this.bucket = null;
Expand All @@ -57,10 +58,13 @@ public ResultSet syncPull(final CommandContext context, final int nRecords) thro

if (bucketName != null)
bucketObj = db.getSchema().getBucketByName(bucketName);
else if (bucket.getBucketName() != null)
bucketObj = db.getSchema().getBucketByName(bucket.getBucketName());
else
bucketObj = db.getSchema().getBucketById(bucket.getBucketNumber());
else if (bucket != null) {
if (bucket.getBucketName() != null)
bucketObj = db.getSchema().getBucketByName(bucket.getBucketName());
else
bucketObj = db.getSchema().getBucketById(bucket.getBucketNumber());
} else
bucketObj = null;

if (bucketObj == null)
throw new CommandExecutionException("Bucket not found: " + bucketName);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,9 @@ public class TraverseExecutionPlanner {

public TraverseExecutionPlanner(final TraverseStatement statement) {
//copying the content, so that it can be manipulated and optimized
this.projections = statement.getProjections() == null ? null : statement.getProjections().stream().map(x -> x.copy()).collect(Collectors.toList());
this.projections = statement.getProjections() == null ?
null :
statement.getProjections().stream().map(x -> x.copy()).collect(Collectors.toList());

this.target = statement.getTarget();
this.whileClause = statement.getWhileClause() == null ? null : statement.getWhileClause().copy();
Expand Down Expand Up @@ -97,7 +99,8 @@ private void handleTraversal(final SelectExecutionPlan result, final CommandCont
//TODO
}

private void handleFetchFromTarget(final SelectExecutionPlan result, final CommandContext context, final boolean profilingEnabled) {
private void handleFetchFromTarget(final SelectExecutionPlan result, final CommandContext context,
final boolean profilingEnabled) {

final FromItem target = this.target == null ? null : this.target.getItem();
if (target == null) {
Expand Down Expand Up @@ -126,8 +129,8 @@ private void handleFetchFromTarget(final SelectExecutionPlan result, final Comma
}
}

private void handleInputParamAsTarget(final SelectExecutionPlan result, final InputParameter inputParam, final CommandContext context,
final boolean profilingEnabled) {
private void handleInputParamAsTarget(final SelectExecutionPlan result, final InputParameter inputParam,
final CommandContext context, final boolean profilingEnabled) {
final Object paramValue = inputParam.getValue(context.getInputParameters());
if (paramValue == null) {
result.chain(new EmptyStep(context, profilingEnabled));//nothing to return
Expand Down Expand Up @@ -186,8 +189,8 @@ private void handleNoTarget(final SelectExecutionPlan result, final CommandConte
result.chain(new EmptyDataGeneratorStep(1, context, profilingEnabled));
}

private void handleIndexAsTarget(final SelectExecutionPlan result, final IndexIdentifier indexIdentifier, final CommandContext context,
final boolean profilingEnabled) {
private void handleIndexAsTarget(final SelectExecutionPlan result, final IndexIdentifier indexIdentifier,
final CommandContext context, final boolean profilingEnabled) {
final String indexName = indexIdentifier.getIndexName();
final RangeIndex index = (RangeIndex) context.getDatabase().getSchema().getIndexByName(indexName);
if (index == null) {
Expand Down Expand Up @@ -220,57 +223,53 @@ private void handleIndexAsTarget(final SelectExecutionPlan result, final IndexId
}
}

private void handleRidsAsTarget(final SelectExecutionPlan plan, final List<Rid> rids, final CommandContext context, final boolean profilingEnabled) {
private void handleRidsAsTarget(final SelectExecutionPlan plan, final List<Rid> rids, final CommandContext context,
final boolean profilingEnabled) {
final List<RID> actualRids = new ArrayList<>();
for (final Rid rid : rids) {
actualRids.add(rid.toRecordId((Result) null, context));
}
plan.chain(new FetchFromRidsStep(actualRids, context, profilingEnabled));
}

private void handleClassAsTarget(final SelectExecutionPlan plan, final FromClause queryTarget, final CommandContext context, final boolean profilingEnabled) {
private void handleClassAsTarget(final SelectExecutionPlan plan, final FromClause queryTarget, final CommandContext context,
final boolean profilingEnabled) {
final Identifier identifier = queryTarget.getItem().getIdentifier();

final Boolean orderByRidAsc = null;//null: no order. true: asc, false:desc
final FetchFromClassExecutionStep fetcher = new FetchFromClassExecutionStep(identifier.getStringValue(), null, context, orderByRidAsc, profilingEnabled);
final FetchFromClassExecutionStep fetcher = new FetchFromClassExecutionStep(identifier.getStringValue(), null, context,
orderByRidAsc, profilingEnabled);
plan.chain(fetcher);
}

private void handleClustersAsTarget(final SelectExecutionPlan plan, final List<Bucket> clusters, final CommandContext context,
final boolean profilingEnabled) {
final Database db = context.getDatabase();
final Boolean orderByRidAsc = null;//null: no order. true: asc, false:desc
if (clusters.size() == 1) {
final Bucket bucket = clusters.get(0);
Integer bucketId = bucket.getBucketNumber();
if (bucketId == null) {
if (bucketId == null)
bucketId = db.getSchema().getBucketByName(bucket.getBucketName()).getFileId();
}

final FetchFromClusterExecutionStep step = new FetchFromClusterExecutionStep(bucketId, context, profilingEnabled);
// TODO: THIS SEEMS A BUG
if (Boolean.TRUE.equals(orderByRidAsc)) {
step.setOrder(FetchFromClusterExecutionStep.ORDER_ASC);
} else if (Boolean.FALSE.equals(orderByRidAsc)) {
step.setOrder(FetchFromClusterExecutionStep.ORDER_DESC);
}
plan.chain(step);
} else {
final int[] bucketIds = new int[clusters.size()];
for (int i = 0; i < clusters.size(); i++) {
final Bucket bucket = clusters.get(i);
Integer bucketId = bucket.getBucketNumber();
if (bucketId == null) {
if (bucketId == null)
bucketId = db.getSchema().getBucketByName(bucket.getBucketName()).getFileId();
}

bucketIds[i] = bucketId;
}
final FetchFromClustersExecutionStep step = new FetchFromClustersExecutionStep(bucketIds, context, orderByRidAsc, profilingEnabled);
final FetchFromClustersExecutionStep step = new FetchFromClustersExecutionStep(bucketIds, context, null, profilingEnabled);
plan.chain(step);
}
}

private void handleSubqueryAsTarget(final SelectExecutionPlan plan, final Statement subQuery, final CommandContext context, final boolean profilingEnabled) {
private void handleSubqueryAsTarget(final SelectExecutionPlan plan, final Statement subQuery, final CommandContext context,
final boolean profilingEnabled) {
final BasicCommandContext subCtx = new BasicCommandContext();
subCtx.setDatabase(context.getDatabase());
subCtx.setParent(context);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ public ResultSet executeDDL(final CommandContext context) {
final DocumentType typez = db.getSchema().getType(typeName.getStringValue());

if (typez == null)
throw new CommandExecutionException("Invalid type name or type not found: " + typez);
throw new CommandExecutionException("Type not found: " + typeName.getStringValue());

final Property property = typez.getProperty(propertyName.getStringValue());
if (property == null)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ else if (entry.getKey().toString().equalsIgnoreCase("maxAttempts"))
private static void buildIndex(final int maxAttempts, Database database, Index.BuildIndexCallback callback, Index idx,
final int batchSize) {
if (idx == null)
throw new CommandExecutionException("Index '" + idx.getName() + "' not found");
throw new CommandExecutionException("Index is null");

if (!idx.isAutomatic())
throw new CommandExecutionException(
Expand Down
Loading