column            string lengths (min - max)
target            20 - 113k
src_fm            11 - 86.3k
src_fm_fc         21 - 86.4k
src_fm_fc_co      30 - 86.4k
src_fm_fc_ms      42 - 86.8k
src_fm_fc_ms_ff   43 - 86.8k
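Each row below pairs a target unit test (`target`) with its focal method under progressively richer context: judging from the column names and the sample values, `src_fm` is the focal method alone, `src_fm_fc` wraps it in the focal class declaration, and the `_co`, `_ms` and `_ff` suffixes append constructor signatures, other method signatures and fields. As a minimal sketch of how such a corpus might be inspected, assuming it is published as a Hugging Face dataset (the repository id below is a hypothetical placeholder, not the real name):

```python
# Minimal sketch. Assumptions: the corpus is available via the `datasets` library;
# "example-org/java-focal-test-pairs" is a hypothetical placeholder id.
from datasets import load_dataset

ds = load_dataset("example-org/java-focal-test-pairs", split="train")  # hypothetical id

context_columns = ["src_fm", "src_fm_fc", "src_fm_fc_co", "src_fm_fc_ms", "src_fm_fc_ms_ff"]

row = ds[0]
# Each successive column adds class-level context around the same focal method,
# so the string lengths should be non-decreasing from left to right.
for name in context_columns:
    print(f"{name:16s} {len(row[name]):7d} chars")

# The paired test method that exercises the focal method:
print(row["target"][:300])
```

For the rows shown here the context columns do grow (or stay equal, e.g. when the focal class has no fields) as suffixes are added, which matches the length ranges in the schema above.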
@Test(expected = AccessDeniedOrObjectDoesNotExistException.class) @Betamax(tape = "records_shouldThrowAccessDeniedOrObjectDoesNotExistExceptionWhileTryingToRevokePermissions") public void shouldThrowAccessDeniedOrObjectDoesNotExistExceptionWhileTryingToRevokePermissions() throws MCSException, IOException { RecordServiceClient client = new RecordServiceClient("http: client.revokePermissionsToVersion(CLOUD_ID, REPRESENTATION_NAME, VERSION, "user", Permission.READ); }
public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); handleDeleteRequest(request); }
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); handleDeleteRequest(request); } }
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); handleDeleteRequest(request); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); }
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); handleDeleteRequest(request); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
RecordServiceClient extends MCSClient { public void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); handleDeleteRequest(request); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
@Test(expected = DriverException.class) @Betamax(tape = "records_shouldThrowDriverExceptionWhileMcsIsNotAvailable") public void shouldThrowMcsExceptionWhileMcsIsNotAvailable() throws MCSException { RecordServiceClient client = new RecordServiceClient("http: client.grantPermissionsToVersion(CLOUD_ID, REPRESENTATION_NAME, VERSION, "user", Permission.READ); }
public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); Response response = null; try { response = request.post(null); if(response.getStatus() == Response.Status.NOT_MODIFIED.getStatusCode()) { throw new MCSException("Permissions not modified"); } else if (response.getStatus() != Response.Status.OK.getStatusCode()) { throwException(response); } } finally { closeResponse(response); } }
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); Response response = null; try { response = request.post(null); if(response.getStatus() == Response.Status.NOT_MODIFIED.getStatusCode()) { throw new MCSException("Permissions not modified"); } else if (response.getStatus() != Response.Status.OK.getStatusCode()) { throwException(response); } } finally { closeResponse(response); } } }
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); Response response = null; try { response = request.post(null); if(response.getStatus() == Response.Status.NOT_MODIFIED.getStatusCode()) { throw new MCSException("Permissions not modified"); } else if (response.getStatus() != Response.Status.OK.getStatusCode()) { throwException(response); } } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); }
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); Response response = null; try { response = request.post(null); if(response.getStatus() == Response.Status.NOT_MODIFIED.getStatusCode()) { throw new MCSException("Permissions not modified"); } else if (response.getStatus() != Response.Status.OK.getStatusCode()) { throwException(response); } } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
RecordServiceClient extends MCSClient { public void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_PERMISSION) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(VERSION, version) .resolveTemplate(PERMISSION, permission.getValue()) .resolveTemplate(USER_NAME, userName); Builder request = target.request(); Response response = null; try { response = request.post(null); if(response.getStatus() == Response.Status.NOT_MODIFIED.getStatusCode()) { throw new MCSException("Permissions not modified"); } else if (response.getStatus() != Response.Status.OK.getStatusCode()) { throwException(response); } } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
@Test @Betamax(tape = "records_shouldCreateNewRepresentationAndUploadFile") public void shouldCreateNewRepresentationAndUploadAFile() throws IOException, FileNotFoundException, MCSException { RecordServiceClient client = new RecordServiceClient("http: InputStream stream = new ByteArrayInputStream("example File Content".getBytes(StandardCharsets.UTF_8)); client.createRepresentation("FGDNTHPJQAUTEIGAHOALM2PMFSDRD726U5LNGMPYZZ34ZNVT5YGA", "sampleRepresentationName9", "sampleProvider", stream, "fileName", "mediaType"); }
public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
@Test @Betamax(tape = "records_shouldCreateNewRepresentationAndUploadFile") public void shouldCreateNewRepresentationAndUploadAFile_1() throws IOException, FileNotFoundException, MCSException { RecordServiceClient client = new RecordServiceClient("http: InputStream stream = new ByteArrayInputStream("example File Content".getBytes(StandardCharsets.UTF_8)); client.createRepresentation("FGDNTHPJQAUTEIGAHOALM2PMFSDRD726U5LNGMPYZZ34ZNVT5YGA", "sampleRepresentationName9", "sampleProvider", stream, "mediaType"); }
public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
RecordServiceClient extends MCSClient { public URI createRepresentation(String cloudId, String representationName, String providerId) throws MCSException { WebTarget target = client .target(baseUrl) .path(REPRESENTATION_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName); Builder request = target.request(); Form form = new Form(); form.param(PROVIDER_ID, providerId); Response response = null; return handleRepresentationResponse(form, request, response); } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
@Betamax(tape = "records_shouldRetrieveRepresentationByRevision") @Test public void shouldRetrieveRepresentationRevision() throws MCSException { RecordServiceClient instance = new RecordServiceClient("http: List<Representation> representations = instance.getRepresentationsByRevision("Z6DX3RWCEFUUSGRUWP6QZWRIZKY7HI5Y7H4UD3OQVB3SRPAUVZHA", "REPRESENTATION1", "Revision_2", "Revision_Provider", "2018-08-28T07:13:34.658"); assertNotNull(representations); assertTrue(representations.size() == 1); assertEquals("REPRESENTATION1", representations.get(0).getRepresentationName()); assertEquals("68b4cc30-aa8d-11e8-8289-1c6f653f9042", representations.get(0).getVersion()); }
public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
@Test public void testFindAclNotExisting() { AclObjectIdentity newAoi = new AclObjectIdentity(); newAoi.setId("invalid"); newAoi.setObjectClass(aoi_class); newAoi.setOwnerId(sid1); service.findAclObjectIdentity(newAoi); }
@Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
@Betamax(tape = "records_shouldThrowRepresentationNotExist") @Test(expected = RepresentationNotExistsException.class) public void shouldThrowRepresentationNotExists() throws MCSException { RecordServiceClient instance = new RecordServiceClient("http: instance.getRepresentationsByRevision("Z6DX3RWCEFUUSGRUWP6QZWRIZKY7HI5Y7H4UD3OQVB3SRPAUVZHA", "REPRESENTATION2", "Revision_2", "Revision_Provider", "2018-08-28T07:13:34.658"); }
public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
RecordServiceClient extends MCSClient { public List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp) throws MCSException { WebTarget webtarget = client .target(baseUrl) .path(REPRESENTATION_REVISIONS_RESOURCE) .resolveTemplate(CLOUD_ID, cloudId) .resolveTemplate(REPRESENTATION_NAME, representationName) .resolveTemplate(REVISION_NAME, revisionName); if (revisionProviderId != null) { webtarget = webtarget.queryParam(F_REVISION_PROVIDER_ID, revisionProviderId); } else { throw new MCSException("RevisionProviderId is required"); } if (revisionTimestamp != null) { webtarget = webtarget.queryParam(F_REVISION_TIMESTAMP, revisionTimestamp); } Builder request = webtarget.request(); Response response = null; try { response = request.get(); if (response.getStatus() == Response.Status.OK.getStatusCode()) { return response.readEntity(new GenericType<List<Representation>>() {}); } else { ErrorInfo errorInfo = response.readEntity(ErrorInfo.class); throw MCSExceptionProvider.generateException(errorInfo); } } catch (MessageBodyProviderNotFoundException e) { String out = webtarget.getUri().toString(); throw new MCSException(out, e); } finally { closeResponse(response); } } RecordServiceClient(String baseUrl); RecordServiceClient(String baseUrl, final int connectTimeoutInMillis, final int readTimeoutInMillis); RecordServiceClient(String baseUrl, final String username, final String password); RecordServiceClient(String baseUrl, final String authorizationHeader); RecordServiceClient(String baseUrl, final String authorizationHeader, final String username, final String password, final int connectTimeoutInMillis, final int readTimeoutInMillis); void useAuthorizationHeader(final String authorizationHeader); Record getRecord(String cloudId); void deleteRecord(String cloudId); List<Representation> getRepresentations(String cloudId); Representation getRepresentation(String cloudId, String representationName); URI createRepresentation(String cloudId, String representationName, String providerId); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String fileName, String mediaType, String key, String value); URI createRepresentation(String cloudId, String representationName, String providerId, InputStream data, String mediaType); void deleteRepresentation(String cloudId, String representationName); List<Representation> getRepresentations(String cloudId, String representationName); Representation getRepresentation(String cloudId, String representationName, String version); Representation getRepresentation(String cloudId, String representationName, String version, String key, String value); void deleteRepresentation(String cloudId, String representationName, String version); void deleteRepresentation(String cloudId, String representationName, String version, String key, String value); URI copyRepresentation(String cloudId, String representationName, String version); URI persistRepresentation(String cloudId, String representationName, String version); void grantPermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void revokePermissionsToVersion(String cloudId, String representationName, String version, String userName, Permission permission); void permitVersion(String cloudId, String representationName, String version); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp); List<Representation> getRepresentationsByRevision( String cloudId, String representationName, String revisionName, String revisionProviderId, String revisionTimestamp, String key, String value); void close(); }
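As the focal method shows, getRepresentationsByRevision treats revisionProviderId as mandatory (a null value is rejected with an MCSException before any HTTP call) while revisionTimestamp is optional, and a missing representation/revision surfaces as the RepresentationNotExistsException expected by the test above. The following usage sketch is illustrative only: the base URL and identifiers are placeholders, and imports for the project classes (RecordServiceClient, Representation, MCSException) are assumed to be available.

// Inside a method that may declare "throws Exception"; placeholder URL and ids.
RecordServiceClient client = new RecordServiceClient("https://example.org/mcs");
try {
    List<Representation> representations = client.getRepresentationsByRevision(
            "SOME_CLOUD_ID",           // cloudId
            "REPRESENTATION2",         // representationName
            "Revision_2",              // revisionName
            "Revision_Provider",       // revisionProviderId - must not be null
            "2018-08-28T07:13:34.658"  // revisionTimestamp - may be null
    );
    System.out.println("Found " + representations.size() + " representation(s)");
} catch (MCSException e) {
    // covers client-side failures such as a null revisionProviderId,
    // as well as error responses translated by MCSExceptionProvider
} finally {
    client.close();
}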
@Test(expected = ProviderDoesNotExistException.class) public void shouldFailWhenFetchingNonExistingProvider() throws ProviderDoesNotExistException { cassandraDataProviderService.getProvider("provident"); }
@Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}''", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } else { return dp; } }
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}''", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } else { return dp; } } }
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}''", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } else { return dp; } } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); }
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}''", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } else { return dp; } } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); @Override ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit); @Override DataProvider getProvider(String providerId); @Override DataProvider createProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(DataProvider dataProvider); @Override void deleteProvider(String providerId); }
CassandraDataProviderService implements DataProviderService { @Override public DataProvider getProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("getProvider() providerId='{}'", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}''", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } else { return dp; } } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); @Override ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit); @Override DataProvider getProvider(String providerId); @Override DataProvider createProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(DataProvider dataProvider); @Override void deleteProvider(String providerId); }
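getProvider never returns null to its callers: a missing provider id is translated into a ProviderDoesNotExistException carrying the HTTP code and error info from IdentifierErrorTemplate. A minimal caller-side sketch follows; "someProviderId" is a placeholder and the project imports are assumed.

// Illustrative only: "someProviderId" is a placeholder, not a value from the source.
try {
    DataProvider provider = cassandraDataProviderService.getProvider("someProviderId");
    System.out.println("Provider exists: " + provider.getId());
} catch (ProviderDoesNotExistException e) {
    // absence is signalled via this checked exception instead of a null return
    System.out.println("No such provider");
}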
@Test public void shouldReturnEmptyArrayWhenNoProviderAdded() { assertTrue("Expecting no providers", cassandraDataProviderService.getProviders(null, 1).getResults().isEmpty()); }
@Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataProviderDao.getProviders(thresholdProviderId, limit + 1); final int providerSize = providers.size(); if (providerSize == limit + 1) { nextProvider = providers.get(limit).getId(); providers.remove(limit); } LOGGER.info("getProviders() returning providers={} and nextProvider={} for thresholdProviderId='{}', limit='{}'", providerSize, nextProvider, thresholdProviderId, limit); return new ResultSlice<DataProvider>(nextProvider, providers); }
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataProviderDao.getProviders(thresholdProviderId, limit + 1); final int providerSize = providers.size(); if (providerSize == limit + 1) { nextProvider = providers.get(limit).getId(); providers.remove(limit); } LOGGER.info("getProviders() returning providers={} and nextProvider={} for thresholdProviderId='{}', limit='{}'", providerSize, nextProvider, thresholdProviderId, limit); return new ResultSlice<DataProvider>(nextProvider, providers); } }
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataProviderDao.getProviders(thresholdProviderId, limit + 1); final int providerSize = providers.size(); if (providerSize == limit + 1) { nextProvider = providers.get(limit).getId(); providers.remove(limit); } LOGGER.info("getProviders() returning providers={} and nextProvider={} for thresholdProviderId='{}', limit='{}'", providerSize, nextProvider, thresholdProviderId, limit); return new ResultSlice<DataProvider>(nextProvider, providers); } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); }
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataProviderDao.getProviders(thresholdProviderId, limit + 1); final int providerSize = providers.size(); if (providerSize == limit + 1) { nextProvider = providers.get(limit).getId(); providers.remove(limit); } LOGGER.info("getProviders() returning providers={} and nextProvider={} for thresholdProviderId='{}', limit='{}'", providerSize, nextProvider, thresholdProviderId, limit); return new ResultSlice<DataProvider>(nextProvider, providers); } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); @Override ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit); @Override DataProvider getProvider(String providerId); @Override DataProvider createProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(DataProvider dataProvider); @Override void deleteProvider(String providerId); }
CassandraDataProviderService implements DataProviderService { @Override public ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit) { LOGGER.info("getProviders() thresholdProviderId='{}', limit='{}'", thresholdProviderId, limit); String nextProvider = null; List<DataProvider> providers = dataProviderDao.getProviders(thresholdProviderId, limit + 1); final int providerSize = providers.size(); if (providerSize == limit + 1) { nextProvider = providers.get(limit).getId(); providers.remove(limit); } LOGGER.info("getProviders() returning providers={} and nextProvider={} for thresholdProviderId='{}', limit='{}'", providerSize, nextProvider, thresholdProviderId, limit); return new ResultSlice<DataProvider>(nextProvider, providers); } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); @Override ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit); @Override DataProvider getProvider(String providerId); @Override DataProvider createProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(DataProvider dataProvider); @Override void deleteProvider(String providerId); }
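getProviders pages with the common "fetch limit + 1" trick: it asks the DAO for one more row than requested, and if that extra row comes back, its id becomes the next-slice token while the row itself is dropped from the returned list. The sketch below isolates that pattern in a generic, self-contained form, independent of the DAO and ResultSlice types used above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class LimitPlusOnePagingSketch {
    // Returns the id to use as the next-page token, or null when this page is the last one.
    // 'fetched' is expected to hold at most limit + 1 items; the extra item is removed here.
    static String trimAndGetNextToken(List<String> fetched, int limit) {
        if (fetched.size() == limit + 1) {
            String nextToken = fetched.get(limit);
            fetched.remove(limit);
            return nextToken;
        }
        return null;
    }

    public static void main(String[] args) {
        // Pretend the DAO returned limit + 1 = 3 rows for a page of size 2.
        List<String> page = new ArrayList<>(Arrays.asList("p1", "p2", "p3"));
        String next = trimAndGetNextToken(page, 2);
        System.out.println(page + " next=" + next); // [p1, p2] next=p3
    }
}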
@Test(expected = ProviderDoesNotExistException.class) public void shouldThrowExceptionWhenDeletingNonExistingProvider() throws ProviderDoesNotExistException { cassandraDataProviderService.deleteProvider("not existing provident"); }
@Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}'", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } dataProviderDao.deleteProvider(providerId); }
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}'", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } dataProviderDao.deleteProvider(providerId); } }
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}'", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } dataProviderDao.deleteProvider(providerId); } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); }
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}'", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } dataProviderDao.deleteProvider(providerId); } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); @Override ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit); @Override DataProvider getProvider(String providerId); @Override DataProvider createProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(DataProvider dataProvider); @Override void deleteProvider(String providerId); }
CassandraDataProviderService implements DataProviderService { @Override public void deleteProvider(String providerId) throws ProviderDoesNotExistException { LOGGER.info("Deleting provider {}", providerId); DataProvider dp = dataProviderDao.getProvider(providerId); if (dp == null) { LOGGER.warn("ProviderDoesNotExistException providerId='{}'", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } dataProviderDao.deleteProvider(providerId); } CassandraDataProviderService(CassandraDataProviderDAO dataProviderDao); @Override ResultSlice<DataProvider> getProviders(String thresholdProviderId, int limit); @Override DataProvider getProvider(String providerId); @Override DataProvider createProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(String providerId, DataProviderProperties properties); @Override DataProvider updateProvider(DataProvider dataProvider); @Override void deleteProvider(String providerId); }
@Test(expected = RecordDoesNotExistException.class) public void testRecordDoesNotExist() throws Exception { service.getCloudId("test2", "test2"); }
@Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = localIdDao.searchById(providerId, recordId); if (cloudIds.isEmpty()) { throw new RecordDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getErrorInfo(providerId, recordId))); } final CloudId cloudId = cloudIds.get(0); LOGGER.info("getCloudId() returning cloudId='{}'", cloudId); return cloudId; }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = localIdDao.searchById(providerId, recordId); if (cloudIds.isEmpty()) { throw new RecordDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getErrorInfo(providerId, recordId))); } final CloudId cloudId = cloudIds.get(0); LOGGER.info("getCloudId() returning cloudId='{}'", cloudId); return cloudId; } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = localIdDao.searchById(providerId, recordId); if (cloudIds.isEmpty()) { throw new RecordDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getErrorInfo(providerId, recordId))); } final CloudId cloudId = cloudIds.get(0); LOGGER.info("getCloudId() returning cloudId='{}'", cloudId); return cloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = localIdDao.searchById(providerId, recordId); if (cloudIds.isEmpty()) { throw new RecordDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getErrorInfo(providerId, recordId))); } final CloudId cloudId = cloudIds.get(0); LOGGER.info("getCloudId() returning cloudId='{}'", cloudId); return cloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId getCloudId(String providerId, String recordId) throws DatabaseConnectionException, RecordDoesNotExistException { LOGGER.info("getCloudId() providerId='{}', recordId='{}'", providerId, recordId); List<CloudId> cloudIds = localIdDao.searchById(providerId, recordId); if (cloudIds.isEmpty()) { throw new RecordDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.RECORD_DOES_NOT_EXIST.getErrorInfo(providerId, recordId))); } final CloudId cloudId = cloudIds.get(0); LOGGER.info("getCloudId() returning cloudId='{}'", cloudId); return cloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
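getCloudId resolves a (providerId, recordId) pair through the local-id table and reports a missing mapping as RecordDoesNotExistException rather than returning null. The helper below is a hypothetical convenience wrapper, not part of the project, showing one way a caller might adapt that contract; project imports are assumed.

import java.util.Optional;

// Hypothetical wrapper: turns the "absent mapping -> checked exception" contract
// of getCloudId into an Optional for callers that only need a lookup.
static Optional<CloudId> tryGetCloudId(UniqueIdentifierService service, String providerId, String recordId) {
    try {
        return Optional.of(service.getCloudId(providerId, recordId));
    } catch (RecordDoesNotExistException e) {
        return Optional.empty();
    } catch (DatabaseConnectionException e) {
        throw new IllegalStateException("Cassandra lookup failed", e);
    }
}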
@Test(expected = CloudIdDoesNotExistException.class) public void testGetLocalIdsByCloudId() throws Exception { List<CloudId> gid = service.getLocalIdsByCloudId(IdGenerator .encodeWithSha256AndBase32("/test11/test11")); CloudId gId = service.createCloudId("test11", "test11"); gid = service.getLocalIdsByCloudId(gId.getId()); assertEquals(gid.size(), 1); }
@Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId={}", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = new ArrayList<>(); for (CloudId cId : cloudIds) { if (localIdDao.searchById(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()).size() > 0) { localIds.add(cId); } } return localIds; }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId={}", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = new ArrayList<>(); for (CloudId cId : cloudIds) { if (localIdDao.searchById(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()).size() > 0) { localIds.add(cId); } } return localIds; } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId={}", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = new ArrayList<>(); for (CloudId cId : cloudIds) { if (localIdDao.searchById(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()).size() > 0) { localIds.add(cId); } } return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId={}", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = new ArrayList<>(); for (CloudId cId : cloudIds) { if (localIdDao.searchById(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()).size() > 0) { localIds.add(cId); } } return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getLocalIdsByCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("getLocalIdsByCloudId() cloudId='{}'", cloudId); List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId={}", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = new ArrayList<>(); for (CloudId cId : cloudIds) { if (localIdDao.searchById(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()).size() > 0) { localIds.add(cId); } } return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
@Test public void testGetCloudIdsByProvider() throws Exception { String providerId = "providerId"; dataProviderDao.createDataProvider(providerId, new DataProviderProperties()); service.createCloudId(providerId, "test3"); service.createCloudId(providerId, "test2"); List<CloudId> cIds = service .getCloudIdsByProvider(providerId, null, 10000); assertThat(cIds.size(), is(2)); }
@Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } if (startRecordId == null) { return localIdDao.searchById(providerId); } else { return localIdDao.searchByIdWithPagination(startRecordId, limit, providerId); } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } if (startRecordId == null) { return localIdDao.searchById(providerId); } else { return localIdDao.searchByIdWithPagination(startRecordId, limit, providerId); } } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } if (startRecordId == null) { return localIdDao.searchById(providerId); } else { return localIdDao.searchByIdWithPagination(startRecordId, limit, providerId); } } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } if (startRecordId == null) { return localIdDao.searchById(providerId); } else { return localIdDao.searchByIdWithPagination(startRecordId, limit, providerId); } } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("getCloudIdsByProvider() providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', startRecordId='{}', end='{}'", providerId, startRecordId, limit); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } if (startRecordId == null) { return localIdDao.searchById(providerId); } else { return localIdDao.searchByIdWithPagination(startRecordId, limit, providerId); } } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
@Test(expected = IdHasBeenMappedException.class) public void testCreateIdMapping() throws Exception { dataProviderDao.createDataProvider("test12", new DataProviderProperties()); CloudId gid = service.createCloudId("test12", "test12"); service.createIdMapping(gid.getId(), "test12", "test13"); service.createIdMapping(gid.getId(), "test12", "test13"); }
@Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createIdMapping() creating mapping for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = localIdDao.searchById(providerId, recordId); if (!localIds.isEmpty()) { LOGGER.warn("IdHasBeenMappedException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new IdHasBeenMappedException(new IdentifierErrorInfo( IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getHttpCode(), IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getErrorInfo(providerId, recordId, cloudId))); } localIdDao.insert(providerId, recordId, cloudId); cloudIdDao.insert(false, cloudId, providerId, recordId); CloudId newCloudId = new CloudId(); newCloudId.setId(cloudId); LocalId lid = new LocalId(); lid.setProviderId(providerId); lid.setRecordId(recordId); newCloudId.setLocalId(lid); LOGGER.info("createIdMapping() new mapping created! new cloudId='{}' for already " + "existing cloudId='{}', providerId='{}', recordId='{}'", newCloudId, cloudId, providerId, recordId); return newCloudId; }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createIdMapping() creating mapping for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = localIdDao.searchById(providerId, recordId); if (!localIds.isEmpty()) { LOGGER.warn("IdHasBeenMappedException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new IdHasBeenMappedException(new IdentifierErrorInfo( IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getHttpCode(), IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getErrorInfo(providerId, recordId, cloudId))); } localIdDao.insert(providerId, recordId, cloudId); cloudIdDao.insert(false, cloudId, providerId, recordId); CloudId newCloudId = new CloudId(); newCloudId.setId(cloudId); LocalId lid = new LocalId(); lid.setProviderId(providerId); lid.setRecordId(recordId); newCloudId.setLocalId(lid); LOGGER.info("createIdMapping() new mapping created! new cloudId='{}' for already " + "existing cloudId='{}', providerId='{}', recordId='{}'", newCloudId, cloudId, providerId, recordId); return newCloudId; } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createIdMapping() creating mapping for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = localIdDao.searchById(providerId, recordId); if (!localIds.isEmpty()) { LOGGER.warn("IdHasBeenMappedException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new IdHasBeenMappedException(new IdentifierErrorInfo( IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getHttpCode(), IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getErrorInfo(providerId, recordId, cloudId))); } localIdDao.insert(providerId, recordId, cloudId); cloudIdDao.insert(false, cloudId, providerId, recordId); CloudId newCloudId = new CloudId(); newCloudId.setId(cloudId); LocalId lid = new LocalId(); lid.setProviderId(providerId); lid.setRecordId(recordId); newCloudId.setLocalId(lid); LOGGER.info("createIdMapping() new mapping created! new cloudId='{}' for already " + "existing cloudId='{}', providerId='{}', recordId='{}'", newCloudId, cloudId, providerId, recordId); return newCloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createIdMapping() creating mapping for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = localIdDao.searchById(providerId, recordId); if (!localIds.isEmpty()) { LOGGER.warn("IdHasBeenMappedException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new IdHasBeenMappedException(new IdentifierErrorInfo( IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getHttpCode(), IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getErrorInfo(providerId, recordId, cloudId))); } localIdDao.insert(providerId, recordId, cloudId); cloudIdDao.insert(false, cloudId, providerId, recordId); CloudId newCloudId = new CloudId(); newCloudId.setId(cloudId); LocalId lid = new LocalId(); lid.setProviderId(providerId); lid.setRecordId(recordId); newCloudId.setLocalId(lid); LOGGER.info("createIdMapping() new mapping created! new cloudId='{}' for already " + "existing cloudId='{}', providerId='{}', recordId='{}'", newCloudId, cloudId, providerId, recordId); return newCloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createIdMapping(String cloudId, String providerId, String recordId) throws DatabaseConnectionException, CloudIdDoesNotExistException, IdHasBeenMappedException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createIdMapping() creating mapping for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } List<CloudId> cloudIds = cloudIdDao.searchById(cloudId); if (cloudIds.isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = localIdDao.searchById(providerId, recordId); if (!localIds.isEmpty()) { LOGGER.warn("IdHasBeenMappedException for cloudId='{}', providerId='{}', recordId='{}'", cloudId, providerId, recordId); throw new IdHasBeenMappedException(new IdentifierErrorInfo( IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getHttpCode(), IdentifierErrorTemplate.ID_HAS_BEEN_MAPPED.getErrorInfo(providerId, recordId, cloudId))); } localIdDao.insert(providerId, recordId, cloudId); cloudIdDao.insert(false, cloudId, providerId, recordId); CloudId newCloudId = new CloudId(); newCloudId.setId(cloudId); LocalId lid = new LocalId(); lid.setProviderId(providerId); lid.setRecordId(recordId); newCloudId.setLocalId(lid); LOGGER.info("createIdMapping() new mapping created! new cloudId='{}' for already " + "existing cloudId='{}', providerId='{}', recordId='{}'", newCloudId, cloudId, providerId, recordId); return newCloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
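createIdMapping validates in a fixed order, provider exists, cloud id exists, and the (providerId, recordId) pair is not yet mapped, before inserting into both the local-id and cloud-id tables. The sketch below mirrors the test above end to end; the provider and record ids are placeholders, project imports are assumed, and the surrounding method is assumed to declare the checked exceptions thrown by the service.

// Inside a test-like method declaring "throws Exception"; placeholder ids only.
dataProviderDao.createDataProvider("providerX", new DataProviderProperties());

// First create a cloud id for one local record of that provider...
CloudId cloudId = service.createCloudId("providerX", "recordA");

// ...then attach a second local record to the same cloud id.
service.createIdMapping(cloudId.getId(), "providerX", "recordB");

try {
    // Mapping the same (provider, record) pair a second time is rejected.
    service.createIdMapping(cloudId.getId(), "providerX", "recordB");
} catch (IdHasBeenMappedException e) {
    System.out.println("recordB is already mapped to " + cloudId.getId());
}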
@Test(expected = RecordDoesNotExistException.class) public void testRemoveIdMapping() throws Exception { dataProviderDao.createDataProvider("test16", new DataProviderProperties()); service.createCloudId("test16", "test16"); service.removeIdMapping("test16", "test16"); service.getCloudId("test16", "test16"); }
@Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', recordId='{}'", providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } localIdDao.delete(providerId, recordId); LOGGER.info("Id mapping removed for providerId='{}', recordId='{}'", providerId, recordId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', recordId='{}'", providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } localIdDao.delete(providerId, recordId); LOGGER.info("Id mapping removed for providerId='{}', recordId='{}'", providerId, recordId); } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', recordId='{}'", providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } localIdDao.delete(providerId, recordId); LOGGER.info("Id mapping removed for providerId='{}', recordId='{}'", providerId, recordId); } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', recordId='{}'", providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } localIdDao.delete(providerId, recordId); LOGGER.info("Id mapping removed for providerId='{}', recordId='{}'", providerId, recordId); } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public void removeIdMapping(String providerId, String recordId) throws DatabaseConnectionException, ProviderDoesNotExistException { LOGGER.info("removeIdMapping() removing Id mapping for providerId='{}', recordId='{}' ...", providerId, recordId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId='{}', recordId='{}'", providerId, recordId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } localIdDao.delete(providerId, recordId); LOGGER.info("Id mapping removed for providerId='{}', recordId='{}'", providerId, recordId); } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
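removeIdMapping above checks provider existence before deleting from the local-id table, so an unregistered provider is rejected up front. A minimal sketch of that failure path, using the same fixture and a provider name that is assumed never to have been created:

@Test(expected = ProviderDoesNotExistException.class)
public void removeIdMapping_sketch_unknownProviderIsRejected() throws Exception {
    // "neverCreatedProvider" is hypothetical and intentionally not registered via dataProviderDao
    service.removeIdMapping("neverCreatedProvider", "anyRecordId");
}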
@Test(expected = RecordDoesNotExistException.class) public void testDeleteCloudId() throws Exception { dataProviderDao.createDataProvider("test21", new DataProviderProperties()); CloudId cId = service.createCloudId("test21", "test21"); service.deleteCloudId(cId.getId()); service.getCloudId(cId.getLocalId().getProviderId(), cId.getLocalId() .getRecordId()); }
@Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
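deleteCloudId above removes every (provider, record) mapping returned by searchAll and hands that list back to the caller. A hedged sketch, assuming each successful insert produces exactly one row in searchAll and reusing the test fixture fields; all names are hypothetical:

@Test
public void deleteCloudId_sketch_returnsAllRemovedMappings() throws Exception {
    dataProviderDao.createDataProvider("sketchProviderB", new DataProviderProperties());
    CloudId cId = service.createCloudId("sketchProviderB", "recB1");
    service.createIdMapping(cId.getId(), "sketchProviderB", "recB2");
    // both mappings are expected back in the returned list before being removed from both tables
    List<CloudId> removed = service.deleteCloudId(cId.getId());
    assertEquals(2, removed.size());
}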
@Test(expected = IllegalArgumentException.class) public void testFindAclWithNullValues() { AclObjectIdentity newAoi = new AclObjectIdentity(); service.findAclObjectIdentity(newAoi); }
@Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentity: objectIdentity: " + objectId); } Row row = session .execute(QueryBuilder.select().all().from(keyspace, AOI_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)) .one(); AclObjectIdentity objectIdentity = convertToAclObjectIdentity(row, true); if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentity: objectIdentity: " + objectIdentity); } return objectIdentity; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
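A hedged save-then-find sketch for the ACL repository, using only methods listed in the signatures above (saveAcl, findAclObjectIdentity) and the aoi_class/sid1 fixture constants the other ACL tests use; whether a freshly saved identity round-trips exactly depends on convertToAclObjectIdentity, so the final assertion is illustrative:

@Test
public void findAclObjectIdentity_sketch_saveThenFind() {
    AclObjectIdentity aoi = new AclObjectIdentity();
    aoi.setId("sketch-object-id");   // hypothetical identifier
    aoi.setObjectClass(aoi_class);   // fixture constants shared with the other ACL tests
    aoi.setOwnerId(sid1);
    service.saveAcl(aoi);            // saveAcl is part of the repository API listed above
    AclObjectIdentity found = service.findAclObjectIdentity(aoi);
    assertEquals(aoi.getRowId(), found.getRowId());
}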
@Test(expected = CloudIdDoesNotExistException.class) public void testDeleteCloudIdException() throws Exception { service.deleteCloudId("test"); }
@Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public List<CloudId> deleteCloudId(String cloudId) throws DatabaseConnectionException, CloudIdDoesNotExistException { LOGGER.info("deleteCloudId() deleting cloudId='{}' ...", cloudId); if (cloudIdDao.searchById(cloudId).isEmpty()) { LOGGER.warn("CloudIdDoesNotExistException for cloudId='{}'", cloudId); throw new CloudIdDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_DOES_NOT_EXIST.getErrorInfo(cloudId))); } List<CloudId> localIds = cloudIdDao.searchAll(cloudId); for (CloudId cId : localIds) { localIdDao.delete(cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); cloudIdDao.delete(cloudId, cId.getLocalId().getProviderId(), cId.getLocalId().getRecordId()); } LOGGER.info("CloudId deleted for cloudId='{}'", cloudId); return localIds; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
@Test @Ignore public void createCloudIdCollisonTest() throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, RecordDatasetEmptyException, CloudIdDoesNotExistException, CloudIdAlreadyExistException { final Map<String, String> map = new HashMap<String, String>(); dataProviderDao.createDataProvider("testprovider", new DataProviderProperties()); for (BigInteger bigCounter = BigInteger.ONE; bigCounter .compareTo(new BigInteger("5000000")) < 0; bigCounter = bigCounter .add(BigInteger.ONE)) { final String counterString = bigCounter.toString(32); final String encodedId = service.createCloudId("testprovider") .getId(); if (map.containsKey(encodedId)) { throw new RuntimeException("bigCounter: " + bigCounter + " | counterString: " + counterString + " | encodedId:" + encodedId + " == collision with ==> " + map.get(encodedId)); } else { map.put(encodedId, "bigCounter: " + bigCounter + " | counterString: " + counterString + " | encodedId:" + encodedId); } } }
@Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId = recordInfo[0]; LOGGER.info("createCloudId() creating cloudId providerId={}", providerId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId={}", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } String recordId = recordInfo.length > 1 ? recordInfo[1] : IdGenerator.timeEncode(providerId); LOGGER.info("createCloudId() creating cloudId providerId='{}', recordId='{}'", providerId, recordId); if (!localIdDao.searchById(providerId, recordId).isEmpty()) { LOGGER.warn("RecordExistsException for providerId={}, recordId={}", providerId, recordId); throw new RecordExistsException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_EXISTS.getHttpCode(), IdentifierErrorTemplate.RECORD_EXISTS.getErrorInfo(providerId, recordId))); } String id = IdGenerator.encodeWithSha256AndBase32("/" + providerId + "/" + recordId); List<CloudId> cloudIds = cloudIdDao.insert(false, id, providerId, recordId); localIdDao.insert(providerId, recordId, id); CloudId cloudId = new CloudId(); cloudId.setId(cloudIds.get(0).getId()); LocalId lId = new LocalId(); lId.setProviderId(providerId); lId.setRecordId(recordId); cloudId.setLocalId(lId); return cloudId; }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId = recordInfo[0]; LOGGER.info("createCloudId() creating cloudId providerId={}", providerId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId={}", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } String recordId = recordInfo.length > 1 ? recordInfo[1] : IdGenerator.timeEncode(providerId); LOGGER.info("createCloudId() creating cloudId providerId='{}', recordId='{}'", providerId, recordId); if (!localIdDao.searchById(providerId, recordId).isEmpty()) { LOGGER.warn("RecordExistsException for providerId={}, recordId={}", providerId, recordId); throw new RecordExistsException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_EXISTS.getHttpCode(), IdentifierErrorTemplate.RECORD_EXISTS.getErrorInfo(providerId, recordId))); } String id = IdGenerator.encodeWithSha256AndBase32("/" + providerId + "/" + recordId); List<CloudId> cloudIds = cloudIdDao.insert(false, id, providerId, recordId); localIdDao.insert(providerId, recordId, id); CloudId cloudId = new CloudId(); cloudId.setId(cloudIds.get(0).getId()); LocalId lId = new LocalId(); lId.setProviderId(providerId); lId.setRecordId(recordId); cloudId.setLocalId(lId); return cloudId; } }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId = recordInfo[0]; LOGGER.info("createCloudId() creating cloudId providerId={}", providerId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId={}", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } String recordId = recordInfo.length > 1 ? recordInfo[1] : IdGenerator.timeEncode(providerId); LOGGER.info("createCloudId() creating cloudId providerId='{}', recordId='{}'", providerId, recordId); if (!localIdDao.searchById(providerId, recordId).isEmpty()) { LOGGER.warn("RecordExistsException for providerId={}, recordId={}", providerId, recordId); throw new RecordExistsException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_EXISTS.getHttpCode(), IdentifierErrorTemplate.RECORD_EXISTS.getErrorInfo(providerId, recordId))); } String id = IdGenerator.encodeWithSha256AndBase32("/" + providerId + "/" + recordId); List<CloudId> cloudIds = cloudIdDao.insert(false, id, providerId, recordId); localIdDao.insert(providerId, recordId, id); CloudId cloudId = new CloudId(); cloudId.setId(cloudIds.get(0).getId()); LocalId lId = new LocalId(); lId.setProviderId(providerId); lId.setRecordId(recordId); cloudId.setLocalId(lId); return cloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId = recordInfo[0]; LOGGER.info("createCloudId() creating cloudId providerId={}", providerId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId={}", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } String recordId = recordInfo.length > 1 ? recordInfo[1] : IdGenerator.timeEncode(providerId); LOGGER.info("createCloudId() creating cloudId providerId='{}', recordId='{}'", providerId, recordId); if (!localIdDao.searchById(providerId, recordId).isEmpty()) { LOGGER.warn("RecordExistsException for providerId={}, recordId={}", providerId, recordId); throw new RecordExistsException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_EXISTS.getHttpCode(), IdentifierErrorTemplate.RECORD_EXISTS.getErrorInfo(providerId, recordId))); } String id = IdGenerator.encodeWithSha256AndBase32("/" + providerId + "/" + recordId); List<CloudId> cloudIds = cloudIdDao.insert(false, id, providerId, recordId); localIdDao.insert(providerId, recordId, id); CloudId cloudId = new CloudId(); cloudId.setId(cloudIds.get(0).getId()); LocalId lId = new LocalId(); lId.setProviderId(providerId); lId.setRecordId(recordId); cloudId.setLocalId(lId); return cloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
CassandraUniqueIdentifierService implements UniqueIdentifierService { @Override public CloudId createCloudId(String... recordInfo) throws DatabaseConnectionException, RecordExistsException, ProviderDoesNotExistException, CloudIdAlreadyExistException { LOGGER.info("createCloudId() creating cloudId"); String providerId = recordInfo[0]; LOGGER.info("createCloudId() creating cloudId providerId={}", providerId); if (dataProviderDao.getProvider(providerId) == null) { LOGGER.warn("ProviderDoesNotExistException for providerId={}", providerId); throw new ProviderDoesNotExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getHttpCode(), IdentifierErrorTemplate.PROVIDER_DOES_NOT_EXIST.getErrorInfo(providerId))); } String recordId = recordInfo.length > 1 ? recordInfo[1] : IdGenerator.timeEncode(providerId); LOGGER.info("createCloudId() creating cloudId providerId='{}', recordId='{}'", providerId, recordId); if (!localIdDao.searchById(providerId, recordId).isEmpty()) { LOGGER.warn("RecordExistsException for providerId={}, recordId={}", providerId, recordId); throw new RecordExistsException(new IdentifierErrorInfo( IdentifierErrorTemplate.RECORD_EXISTS.getHttpCode(), IdentifierErrorTemplate.RECORD_EXISTS.getErrorInfo(providerId, recordId))); } String id = IdGenerator.encodeWithSha256AndBase32("/" + providerId + "/" + recordId); List<CloudId> cloudIds = cloudIdDao.insert(false, id, providerId, recordId); localIdDao.insert(providerId, recordId, id); CloudId cloudId = new CloudId(); cloudId.setId(cloudIds.get(0).getId()); LocalId lId = new LocalId(); lId.setProviderId(providerId); lId.setRecordId(recordId); cloudId.setLocalId(lId); return cloudId; } CassandraUniqueIdentifierService(CassandraCloudIdDAO cloudIdDao, CassandraLocalIdDAO localIdDao, CassandraDataProviderDAO dataProviderDao); @Override CloudId createCloudId(String... recordInfo); @Override CloudId getCloudId(String providerId, String recordId); @Override List<CloudId> getLocalIdsByCloudId(String cloudId); @Override List<CloudId> getLocalIdsByProvider(String providerId, String start, int end); @Override List<CloudId> getCloudIdsByProvider(String providerId, String startRecordId, int limit); @Override CloudId createIdMapping(String cloudId, String providerId, String recordId); @Override void removeIdMapping(String providerId, String recordId); @Override List<CloudId> deleteCloudId(String cloudId); @Override String getHostList(); @Override String getKeyspace(); @Override String getPort(); @Override CloudId createIdMapping(String cloudId, String providerId); }
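Since createCloudId derives the identifier deterministically from "/" + providerId + "/" + recordId, a sketch test can check the encoding directly; it assumes the same service/dataProviderDao fixture as the neighbouring tests, and the provider and record names are hypothetical:

@Test
public void createCloudId_sketch_idIsDeterministicHashOfProviderAndRecord() throws Exception {
    dataProviderDao.createDataProvider("sketchProviderC", new DataProviderProperties());
    CloudId cId = service.createCloudId("sketchProviderC", "sketchRecordC");
    // mirrors the encoding used inside createCloudId(...)
    String expected = IdGenerator.encodeWithSha256AndBase32("/sketchProviderC/sketchRecordC");
    assertEquals(expected, cId.getId());
}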
@Test(expected = CloudIdAlreadyExistException.class) public void insert_tryInsertTheSameContentTwice_ThrowsCloudIdAlreadyExistException() throws Exception { final String providerId = "providerId"; final String recordId = "recordId"; final String id = "id"; service.insert(true, id, providerId, recordId); service.insert(true, id, providerId, recordId); }
public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row = rs.one(); if (!row.getBool("[applied]")) { throw new CloudIdAlreadyExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getErrorInfo(args[0]))); } } else { dbService.getSession().execute(insertStatement.bind(args[0], args[1], args[2])); } } catch (NoHostAvailableException e) { throw new DatabaseConnectionException(new IdentifierErrorInfo( IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getHttpCode(), IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getErrorInfo(hostList, port, e.getMessage()))); } CloudId cId = new CloudId(); LocalId lId = new LocalId(); lId.setProviderId(args[1]); lId.setRecordId(args[2]); cId.setLocalId(lId); cId.setId(args[0]); List<CloudId> cIds = new ArrayList<>(); cIds.add(cId); return cIds; }
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row = rs.one(); if (!row.getBool("[applied]")) { throw new CloudIdAlreadyExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getErrorInfo(args[0]))); } } else { dbService.getSession().execute(insertStatement.bind(args[0], args[1], args[2])); } } catch (NoHostAvailableException e) { throw new DatabaseConnectionException(new IdentifierErrorInfo( IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getHttpCode(), IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getErrorInfo(hostList, port, e.getMessage()))); } CloudId cId = new CloudId(); LocalId lId = new LocalId(); lId.setProviderId(args[1]); lId.setRecordId(args[2]); cId.setLocalId(lId); cId.setId(args[0]); List<CloudId> cIds = new ArrayList<>(); cIds.add(cId); return cIds; } }
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row = rs.one(); if (!row.getBool("[applied]")) { throw new CloudIdAlreadyExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getErrorInfo(args[0]))); } } else { dbService.getSession().execute(insertStatement.bind(args[0], args[1], args[2])); } } catch (NoHostAvailableException e) { throw new DatabaseConnectionException(new IdentifierErrorInfo( IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getHttpCode(), IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getErrorInfo(hostList, port, e.getMessage()))); } CloudId cId = new CloudId(); LocalId lId = new LocalId(); lId.setProviderId(args[1]); lId.setRecordId(args[2]); cId.setLocalId(lId); cId.setId(args[0]); List<CloudId> cIds = new ArrayList<>(); cIds.add(cId); return cIds; } CassandraCloudIdDAO(CassandraConnectionProvider dbService); }
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row = rs.one(); if (!row.getBool("[applied]")) { throw new CloudIdAlreadyExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getErrorInfo(args[0]))); } } else { dbService.getSession().execute(insertStatement.bind(args[0], args[1], args[2])); } } catch (NoHostAvailableException e) { throw new DatabaseConnectionException(new IdentifierErrorInfo( IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getHttpCode(), IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getErrorInfo(hostList, port, e.getMessage()))); } CloudId cId = new CloudId(); LocalId lId = new LocalId(); lId.setProviderId(args[1]); lId.setRecordId(args[2]); cId.setLocalId(lId); cId.setId(args[0]); List<CloudId> cIds = new ArrayList<>(); cIds.add(cId); return cIds; } CassandraCloudIdDAO(CassandraConnectionProvider dbService); List<CloudId> searchById(String... args); List<CloudId> searchAll(String args); List<CloudId> insert(boolean insertOnlyIfNoExist, String... args); void delete(String... args); String getHostList(); String getKeyspace(); String getPort(); }
CassandraCloudIdDAO { public List<CloudId> insert(boolean insertOnlyIfNoExist, String... args) throws DatabaseConnectionException, CloudIdAlreadyExistException { ResultSet rs = null; try { if (insertOnlyIfNoExist) { rs = dbService.getSession().execute(insertIfNoExistsStatement.bind(args[0], args[1], args[2])); Row row = rs.one(); if (!row.getBool("[applied]")) { throw new CloudIdAlreadyExistException(new IdentifierErrorInfo( IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getHttpCode(), IdentifierErrorTemplate.CLOUDID_ALREADY_EXIST.getErrorInfo(args[0]))); } } else { dbService.getSession().execute(insertStatement.bind(args[0], args[1], args[2])); } } catch (NoHostAvailableException e) { throw new DatabaseConnectionException(new IdentifierErrorInfo( IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getHttpCode(), IdentifierErrorTemplate.DATABASE_CONNECTION_ERROR.getErrorInfo(hostList, port, e.getMessage()))); } CloudId cId = new CloudId(); LocalId lId = new LocalId(); lId.setProviderId(args[1]); lId.setRecordId(args[2]); cId.setLocalId(lId); cId.setId(args[0]); List<CloudId> cIds = new ArrayList<>(); cIds.add(cId); return cIds; } CassandraCloudIdDAO(CassandraConnectionProvider dbService); List<CloudId> searchById(String... args); List<CloudId> searchAll(String args); List<CloudId> insert(boolean insertOnlyIfNoExist, String... args); void delete(String... args); String getHostList(); String getKeyspace(); String getPort(); }
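insert(true, ...) above relies on Cassandra lightweight transactions: the conditional INSERT returns a single row whose "[applied]" column says whether the write actually happened. A hedged sketch of that pattern; the table and column names are hypothetical and the local variables (session, cloudId, providerId, recordId) are assumed, only the "[applied]" check comes from the DAO code above:

// conditional insert: succeeds only if no row with this key exists yet
ResultSet rs = session.execute(
        "INSERT INTO cloud_id (cloud_id, provider_id, record_id) VALUES (?, ?, ?) IF NOT EXISTS",
        cloudId, providerId, recordId);
if (!rs.one().getBool("[applied]")) {
    // the row already existed – the DAO maps this situation to CloudIdAlreadyExistException
}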
@Test @Parameters({"uis/,uis","uis,uis","uis public void shouldGetUrlWithoutSlashAtTheEnd(String inputSuffix, String expectedSuffix) { StaticUrlProvider provider = new StaticUrlProvider(URL_PREFIX + inputSuffix); String result = provider.getBaseUrl(); assertThat(result,is(URL_PREFIX + expectedSuffix)); }
public String getBaseUrl() { return baseUrl; }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } StaticUrlProvider(final String serviceUrl); }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } StaticUrlProvider(final String serviceUrl); String getBaseUrl(); }
StaticUrlProvider implements UrlProvider { public String getBaseUrl() { return baseUrl; } StaticUrlProvider(final String serviceUrl); String getBaseUrl(); }
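The parameterized test above expects getBaseUrl() to return the configured URL without trailing slashes. A minimal constructor sketch under that assumption; the real StaticUrlProvider constructor body is not shown in these snippets, so this is illustrative only:

public StaticUrlProvider(final String serviceUrl) {
    String url = serviceUrl;
    // strip any trailing '/' so getBaseUrl() returns a slash-free base URL, as the test expects
    while (url.endsWith("/")) {
        url = url.substring(0, url.length() - 1);
    }
    this.baseUrl = url;
}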
@Test public void shouldSerializeTheDateSuccessfully() throws ParseException { Date date = dateAdapter.unmarshal(DATE_STRING); assertEquals(cal.getTime(), date); }
@Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
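A hedged round-trip sketch for DateAdapter, assuming FORMATTER parses the same FORMAT pattern it formats and that the pattern keeps millisecond precision (the pattern itself is not visible in these snippets):

@Test
public void dateAdapter_sketch_roundTrip() throws ParseException {
    DateAdapter adapter = new DateAdapter();
    Date original = new Date();
    String text = adapter.marshal(original);
    // equality holds only if FORMAT preserves millisecond precision – an assumption, since FORMAT is not shown here
    assertEquals(original, adapter.unmarshal(text));
}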
@Test public void shouldDeSerializeTheDateSuccessfully() { assertEquals(dateAdapter.marshal(cal.getTime()), DATE_STRING); }
@Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
@Test(expected = ParseException.class) public void shouldThrowParsingException() throws ParseException { String unParsedDateString = "2017-11-23"; dateAdapter.unmarshal(unParsedDateString); }
@Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
@Test public void shouldCreateNullDateInCaseEmptyOrNull() throws ParseException { assertNull(dateAdapter.unmarshal(null)); assertNull(dateAdapter.unmarshal("")); }
@Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
DateAdapter extends XmlAdapter<String, Date> { @Override public Date unmarshal(String stringDate) throws ParseException { if (stringDate == null || stringDate.isEmpty()) { return null; } try { Date date = FORMATTER.parse(stringDate); if(date == null){ throw new ParseException("Cannot parse the date. The accepted date format is "+FORMAT, 0); } return date; } catch (ParseException e) { throw new ParseException(e.getMessage() + ". The accepted date format is "+FORMAT, e.getErrorOffset()); } } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
@Test(expected = RuntimeException.class) public void shouldThrowRunTimeException() { dateAdapter.marshal(null); }
@Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
DateAdapter extends XmlAdapter<String, Date> { @Override public String marshal(Date date) { if (date == null) { throw new RuntimeException("The revision creation Date shouldn't be null"); } return FORMATTER.format(date); } @Override String marshal(Date date); @Override Date unmarshal(String stringDate); }
@Test public void testFindAclChildrenForNotExistingAcl() { AclObjectIdentity newAoi = new AclObjectIdentity(); newAoi.setId("invalid"); newAoi.setObjectClass(aoi_class); newAoi.setOwnerId(sid1); List<AclObjectIdentity> children = service .findAclObjectIdentityChildren(newAoi); assertTrue(children.isEmpty()); }
@Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
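A child only appears in findAclObjectIdentityChildren if something first wrote the matching children-table row; in this excerpt that writer is saveAcl, which inserts a children row whenever the new identity carries a parent row id. A minimal sketch of that round trip follows, reusing the service, aoi_class, sid1 and createDefaultTestAOI() fixtures from the surrounding tests; setParentObjectId and setParentObjectClass are assumed setter names, since only the getters appear in this excerpt.
// Sketch: create a parent, attach a child to it, then list the children.
AclObjectIdentity parent = createDefaultTestAOI();
service.saveAcl(parent);
AclObjectIdentity child = new AclObjectIdentity();
child.setId("child-id");
child.setObjectClass(aoi_class);
child.setOwnerId(sid1);
child.setParentObjectId(parent.getId());             // assumed setter
child.setParentObjectClass(parent.getObjectClass()); // assumed setter
service.saveAcl(child);                               // also writes the children-table row
List<AclObjectIdentity> children = service.findAclObjectIdentityChildren(parent);
// children is expected to contain the child identity saved above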
@Test(expected = IllegalArgumentException.class) public void testFindNullAclChildren() { service.findAclObjectIdentityChildren(null); }
@Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
@Test(expected = IllegalArgumentException.class) public void testFindAclChildrenWithNullValues() { AclObjectIdentity newAoi = new AclObjectIdentity(); service.findAclObjectIdentityChildren(newAoi); }
@Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId) { assertAclObjectIdentity(objectId); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN findAclObjectIdentityChildren: objectIdentity: " + objectId); } ResultSet resultSet = session.execute(QueryBuilder.select().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", objectId.getRowId())).setConsistencyLevel(ConsistencyLevel.QUORUM)); List<AclObjectIdentity> result = new ArrayList<>(); for (Row row : resultSet.all()) { result.add(convertToAclObjectIdentity(row, false)); } if (LOG.isDebugEnabled()) { LOG.debug("END findAclObjectIdentityChildren: children: " + result); } return result; } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
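The two IllegalArgumentException tests above only pass if assertAclObjectIdentity rejects both a null argument and an identity whose key fields are unset. The guard itself is not included in this excerpt, so the following is a hypothetical reconstruction, assuming it checks the fields the row id is derived from:
// Hypothetical guard matching the behaviour the two tests exercise;
// the real assertAclObjectIdentity and its message texts are not shown here.
private static void assertAclObjectIdentity(AclObjectIdentity objectId) {
    if (objectId == null) {
        throw new IllegalArgumentException("AclObjectIdentity must not be null");
    }
    if (objectId.getId() == null || objectId.getObjectClass() == null) {
        // getRowId() is built from these fields, so both must be present
        throw new IllegalArgumentException("id and objectClass are required");
    }
}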
@Test(expected = Exception.class) public void shouldRetry5TimesBeforeFailing() { doThrow(Exception.class).when(subTaskInfoDAO).removeNotifications(eq(TASK_ID)); removerImpl.removeNotifications(TASK_ID); verify(subTaskInfoDAO, times(6)).removeNotifications((eq(TASK_ID))); }
@Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the logs."); throw e; } } } }
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the logs."); throw e; } } } } }
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the logs."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); }
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the logs."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
RemoverImpl implements Remover { @Override public void removeNotifications(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { subTaskInfoDAO.removeNotifications(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the logs. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the logs."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
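In the retry test above, the verify(...) call sits after the statement that is expected to throw, so with @Test(expected = Exception.class) it never actually runs. A sketch of a variant that keeps the verification reachable, assuming DEFAULT_RETRIES is 5 (its value is not visible in this excerpt) and the same subTaskInfoDAO, removerImpl and TASK_ID fixtures:
@Test
public void shouldInvokeDaoSixTimesWhenEveryCallFails() {
    // a RuntimeException keeps the stubbing valid for a method with no throws clause
    doThrow(new RuntimeException("boom")).when(subTaskInfoDAO).removeNotifications(TASK_ID);
    try {
        removerImpl.removeNotifications(TASK_ID);
        fail("the last retry should rethrow the failure");
    } catch (RuntimeException expected) {
        // removeNotifications rethrows once the retries are exhausted
    }
    verify(subTaskInfoDAO, times(6)).removeNotifications(TASK_ID); // 1 initial call + 5 retries
}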
@Test(expected = IllegalArgumentException.class) public void testUpdateNullAcl() { service.updateAcl(null, null); }
@Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
@Test(expected = AclNotFoundException.class) public void testUpdateAclNotExisting() { AclObjectIdentity newAoi = new AclObjectIdentity(); newAoi.setId("invalid"); newAoi.setObjectClass(aoi_class); newAoi.setOwnerId(sid1); service.updateAcl(newAoi, new ArrayList<AclEntry>()); }
@Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries) throws AclNotFoundException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN updateAcl: aclObjectIdentity: " + aoi + ", entries: " + entries); } AclObjectIdentity persistedAoi = findAclObjectIdentity(aoi); if (persistedAoi == null) { throw new AclNotFoundException("Object identity '" + aoi + "' does not exist"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })); batch.add(QueryBuilder.delete().all().from(keyspace, ACL_TABLE).where(QueryBuilder.eq("id", aoi.getRowId()))); boolean parentChanged = false; if (!(persistedAoi.getParentRowId() == null ? aoi.getParentRowId() == null : persistedAoi.getParentRowId().equals(aoi.getParentRowId()))) { parentChanged = true; if (persistedAoi.getParentRowId() != null) { batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.eq("id", persistedAoi.getParentRowId())) .and(QueryBuilder.eq("childId", aoi.getRowId()))).setConsistencyLevel(ConsistencyLevel.QUORUM); } } session.execute(batch); batch = QueryBuilder.batch(); boolean executeBatch = false; if (entries != null && !entries.isEmpty()) { for (AclEntry entry : entries) { batch.add(QueryBuilder.insertInto(keyspace, ACL_TABLE).values(ACL_KEYS, new Object[] { aoi.getRowId(), entry.getOrder(), entry.getSid(), entry.getMask(), entry.isSidPrincipal(), entry.isGranting(), entry.isAuditSuccess(), entry.isAuditFailure() })); } executeBatch = true; } if (parentChanged) { if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } executeBatch = true; } if (executeBatch) { session.execute(batch); } if (LOG.isDebugEnabled()) { LOG.debug("END updateAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
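One detail worth noting in updateAcl: the batch always deletes the existing acls rows for the identity and only re-inserts when the entry list is non-empty, so passing an empty list (as testUpdateAclNotExisting does) clears all entries of an existing identity. A usage sketch, reusing the service, aoi_class and sid1 fixtures from the tests above:
@Test
public void updatingWithEmptyListClearsEntries() {     // sketch, not part of the original suite
    AclObjectIdentity aoi = new AclObjectIdentity();
    aoi.setId("existing-id");
    aoi.setObjectClass(aoi_class);
    aoi.setOwnerId(sid1);
    service.saveAcl(aoi);                               // must exist first, otherwise AclNotFoundException
    service.updateAcl(aoi, new ArrayList<AclEntry>());  // deletes the acl rows, inserts nothing back
}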
@Test(expected = IllegalArgumentException.class) public void testSaveNullAcl() { service.saveAcl(null); }
@Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
@Test(expected = AclAlreadyExistsException.class) public void testSaveAclAlreadyExisting() { AclObjectIdentity newAoi = createDefaultTestAOI(); service.saveAcl(newAoi); service.saveAcl(newAoi); }
@Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
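saveAcl detects duplicates with a read-before-write: it calls findAclObjectIdentity first and only then executes the insert batch, which is why testSaveAclAlreadyExisting relies on the first save being visible before the second one runs. A short sketch of the happy path that the duplicate test builds on, reusing createDefaultTestAOI() and service from the tests above:
@Test
public void savedAclCanBeReadBack() {                   // sketch, assumes org.junit.Assert.assertNotNull is imported
    AclObjectIdentity newAoi = createDefaultTestAOI();
    service.saveAcl(newAoi);
    assertNotNull(service.findAclObjectIdentity(newAoi));
}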
@Test(expected = IllegalArgumentException.class) public void testDeleteNullAcl() { service.deleteAcls(null); }
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
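The deleteAcls method shown above removes the given row ids from the AOI and children tables in a single logged batch. Purely as an illustration, the sketch below isolates that batch-delete pattern using the same QueryBuilder/Session API that appears in the method (the classic com.datastax.driver.core DataStax driver); the keyspace and table names here are placeholders rather than the project's constants, and the consistency level is set explicitly on the batch object so the intent is visible.

import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.querybuilder.Batch;
import com.datastax.driver.core.querybuilder.QueryBuilder;

import java.util.List;

public class AclBatchDeleteSketch {

    // Removes the given row ids from two tables in one logged batch,
    // mirroring the structure of deleteAcls above. Keyspace and table
    // names are placeholders for illustration only.
    public static void deleteRows(Session session, String keyspace, List<String> rowIds) {
        Batch batch = QueryBuilder.batch();
        batch.add(QueryBuilder.delete().all()
                .from(keyspace, "aois")
                .where(QueryBuilder.in("id", rowIds.toArray())));
        batch.add(QueryBuilder.delete().all()
                .from(keyspace, "children")
                .where(QueryBuilder.in("id", rowIds.toArray())));
        // The consistency level applies to the batch as a whole.
        batch.setConsistencyLevel(ConsistencyLevel.QUORUM);
        session.execute(batch);
    }
}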
@Test public void testDeleteAclNotExisting() { AclObjectIdentity newAoi = new AclObjectIdentity(); newAoi.setId("invalid"); newAoi.setObjectClass(aoi_class); newAoi.setOwnerId(sid1); service.deleteAcls(Arrays.asList(new AclObjectIdentity[] { newAoi })); }
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
@Test(expected = IllegalArgumentException.class) public void testDeleteEmptyAclList() { service.deleteAcls(new ArrayList<AclObjectIdentity>()); }
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
@Test(expected = IllegalArgumentException.class) public void testSaveAclWithNullValues() { AclObjectIdentity newAoi = new AclObjectIdentity(); service.saveAcl(newAoi); }
@Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void saveAcl(AclObjectIdentity aoi) throws AclAlreadyExistsException { assertAclObjectIdentity(aoi); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN saveAcl: aclObjectIdentity: " + aoi); } if (findAclObjectIdentity(aoi) != null) { throw new AclAlreadyExistsException("Object identity '" + aoi + "' already exists"); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.insertInto(keyspace, AOI_TABLE).values(AOI_KEYS, new Object[] { aoi.getRowId(), aoi.getId(), aoi.getObjectClass(), aoi.isEntriesInheriting(), aoi.getOwnerId(), aoi.isOwnerPrincipal(), aoi.getParentObjectId(), aoi.getParentObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); if (aoi.getParentRowId() != null) { batch.add(QueryBuilder.insertInto(keyspace, CHILDREN_TABLE).values(CHILD_KEYS, new Object[] { aoi.getParentRowId(), aoi.getRowId(), aoi.getId(), aoi.getObjectClass() })) .setConsistencyLevel(ConsistencyLevel.QUORUM); } session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END saveAcl"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
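As a companion to testSaveAclWithNullValues, the following is a hypothetical caller of saveAcl that mirrors how the tests above build an AclObjectIdentity. The setter names are taken from those tests; the id, object class, and owner values are placeholders, and imports for the project classes (AclObjectIdentity, CassandraAclRepository, AclAlreadyExistsException) are omitted because their packages are not shown in these rows.

public class SaveAclSketch {

    // Saves a new ACL object identity. assertAclObjectIdentity inside saveAcl
    // rejects incomplete identities with IllegalArgumentException (the case the
    // test above exercises), and saving the same identity twice fails with
    // AclAlreadyExistsException.
    public static void saveNewAcl(CassandraAclRepository repository) {
        AclObjectIdentity aoi = new AclObjectIdentity();
        aoi.setId("sample-object-id");                // placeholder id
        aoi.setObjectClass("eu.example.SampleClass"); // placeholder object class
        aoi.setOwnerId("sample-owner");               // placeholder owner sid
        try {
            repository.saveAcl(aoi);
        } catch (AclAlreadyExistsException e) {
            // an ACL for this object identity is already stored
        }
    }
}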
@Test(expected = IllegalArgumentException.class) public void testDeleteAclWithNullValues() { AclObjectIdentity newAoi = new AclObjectIdentity(); service.deleteAcls(Arrays.asList(new AclObjectIdentity[] { newAoi })); }
@Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
CassandraAclRepository implements AclRepository { @Override public void deleteAcls(List<AclObjectIdentity> objectIdsToDelete) { assertAclObjectIdentityList(objectIdsToDelete); if (LOG.isDebugEnabled()) { LOG.debug("BEGIN deleteAcls: objectIdsToDelete: " + objectIdsToDelete); } List<String> ids = new ArrayList<>(objectIdsToDelete.size()); for (AclObjectIdentity entry : objectIdsToDelete) { ids.add(entry.getRowId()); } Batch batch = QueryBuilder.batch(); batch.add(QueryBuilder.delete().all().from(keyspace, AOI_TABLE).where(QueryBuilder.in("id", ids.toArray()))); batch.add(QueryBuilder.delete().all().from(keyspace, CHILDREN_TABLE) .where(QueryBuilder.in("id", ids.toArray()))).setConsistencyLevel(ConsistencyLevel.QUORUM); session.execute(batch); if (LOG.isDebugEnabled()) { LOG.debug("END deleteAcls"); } } CassandraAclRepository(CassandraConnectionProvider provider, boolean initSchema); CassandraAclRepository(Session session, String keyspace); CassandraAclRepository(Session session, String keyspace, boolean initSchema); @Override Map<AclObjectIdentity, Set<AclEntry>> findAcls(List<AclObjectIdentity> objectIdsToLookup); @Override AclObjectIdentity findAclObjectIdentity(AclObjectIdentity objectId); @Override List<AclObjectIdentity> findAclObjectIdentityChildren(AclObjectIdentity objectId); @Override void deleteAcls(List<AclObjectIdentity> objectIdsToDelete); @Override void saveAcl(AclObjectIdentity aoi); @Override void updateAcl(AclObjectIdentity aoi, List<AclEntry> entries); void createAoisTable(); void createChilrenTable(); void createAclsTable(); }
@Test public void currentBucketShouldBeNull() { Bucket bucket = bucketsHandler.getCurrentBucket(BUCKETS_TABLE_NAME, "sampleObject"); Assert.assertNull(bucket); }
public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : rows.get(rows.size() - 1); if (row != null) { return new Bucket( row.getString(OBJECT_ID_COLUMN_NAME), row.getUUID(BUCKET_ID_COLUMN_NAME).toString(), row.getLong(ROWS_COUNT_COLUMN_NAME)); } return null; }
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : rows.get(rows.size() - 1); if (row != null) { return new Bucket( row.getString(OBJECT_ID_COLUMN_NAME), row.getUUID(BUCKET_ID_COLUMN_NAME).toString(), row.getLong(ROWS_COUNT_COLUMN_NAME)); } return null; } }
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : rows.get(rows.size() - 1); if (row != null) { return new Bucket( row.getString(OBJECT_ID_COLUMN_NAME), row.getUUID(BUCKET_ID_COLUMN_NAME).toString(), row.getLong(ROWS_COUNT_COLUMN_NAME)); } return null; } BucketsHandler(Session session); }
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : rows.get(rows.size() - 1); if (row != null) { return new Bucket( row.getString(OBJECT_ID_COLUMN_NAME), row.getUUID(BUCKET_ID_COLUMN_NAME).toString(), row.getLong(ROWS_COUNT_COLUMN_NAME)); } return null; } BucketsHandler(Session session); Bucket getCurrentBucket(String bucketsTableName, String objectId); void increaseBucketCount(String bucketsTableName, Bucket bucket); void decreaseBucketCount(String bucketsTableName, Bucket bucket); List<Bucket> getAllBuckets(String bucketsTableName, String objectId); Bucket getBucket(String bucketsTableName, Bucket bucket); Bucket getNextBucket(String bucketsTableName, String objectId); Bucket getNextBucket(String bucketsTableName, String objectId, Bucket bucket); void removeBucket(String bucketsTableName, Bucket bucket); }
BucketsHandler { public Bucket getCurrentBucket(String bucketsTableName, String objectId) { String query = "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName + " WHERE object_id = '" + objectId + "';"; ResultSet rs = session.execute(query); List<Row> rows = rs.all(); Row row = rows.isEmpty() ? null : rows.get(rows.size() - 1); if (row != null) { return new Bucket( row.getString(OBJECT_ID_COLUMN_NAME), row.getUUID(BUCKET_ID_COLUMN_NAME).toString(), row.getLong(ROWS_COUNT_COLUMN_NAME)); } return null; } BucketsHandler(Session session); Bucket getCurrentBucket(String bucketsTableName, String objectId); void increaseBucketCount(String bucketsTableName, Bucket bucket); void decreaseBucketCount(String bucketsTableName, Bucket bucket); List<Bucket> getAllBuckets(String bucketsTableName, String objectId); Bucket getBucket(String bucketsTableName, Bucket bucket); Bucket getNextBucket(String bucketsTableName, String objectId); Bucket getNextBucket(String bucketsTableName, String objectId, Bucket bucket); void removeBucket(String bucketsTableName, Bucket bucket); static final String OBJECT_ID_COLUMN_NAME; static final String BUCKET_ID_COLUMN_NAME; static final String ROWS_COUNT_COLUMN_NAME; }
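getCurrentBucket above assembles its CQL by concatenating the object id into the query string and treats the last returned row as the current bucket. Purely as an illustration (not the project's implementation), the sketch below performs the same read with a bound parameter for the object id; the table name still has to be concatenated because CQL does not allow binding table names, and in real code the prepared statement would be cached rather than re-prepared on every call.

import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;

import java.util.List;

public class CurrentBucketSketch {

    // Returns the last bucket row for the given object id, or null when the
    // object id has no buckets yet (the case exercised by currentBucketShouldBeNull).
    public static Row lastBucketRow(Session session, String bucketsTableName, String objectId) {
        PreparedStatement statement = session.prepare(
                "SELECT object_id, bucket_id, rows_count FROM " + bucketsTableName
                        + " WHERE object_id = ?;");
        BoundStatement bound = statement.bind(objectId);
        List<Row> rows = session.execute(bound).all();
        return rows.isEmpty() ? null : rows.get(rows.size() - 1);
    }
}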
@Test public void shouldSuccessfullyRemoveErrors() { doNothing().when(taskErrorDAO).removeErrors(eq(TASK_ID)); removerImpl.removeErrorReports(TASK_ID); verify(taskErrorDAO, times(1)).removeErrors((eq(TASK_ID))); }
@Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
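removeErrorReports above retries the DAO call a fixed number of times, waiting between attempts and rethrowing once the retries are exhausted. The same pattern, extracted into a small self-contained helper, looks roughly like this; the retry count and wait time are arbitrary placeholders rather than the values of DEFAULT_RETRIES or waitForTheNextCall.

import java.util.function.Supplier;

public final class RetrySketch {

    private RetrySketch() {
    }

    // Runs the action, retrying on RuntimeException up to maxRetries extra times
    // and sleeping between attempts; rethrows the last failure once retries run out.
    public static <T> T callWithRetries(Supplier<T> action, int maxRetries, long waitMillis) {
        int retries = maxRetries;
        while (true) {
            try {
                return action.get();
            } catch (RuntimeException e) {
                if (retries-- > 0) {
                    sleepQuietly(waitMillis);
                } else {
                    throw e;
                }
            }
        }
    }

    private static void sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}

A caller would wrap the DAO call from the method above along the lines of callWithRetries(() -> { taskErrorDAO.removeErrors(taskId); return null; }, 3, 1000), with the numbers chosen to match the real configuration.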
@Test public void shouldCreateNewBucket() { Bucket bucket = new Bucket("sampleObjectId", new com.eaio.uuid.UUID().toString(), 0); bucketsHandler.increaseBucketCount(BUCKETS_TABLE_NAME, bucket); assertResults(bucket, 1); }
public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHandler(Session session); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHandler(Session session); Bucket getCurrentBucket(String bucketsTableName, String objectId); void increaseBucketCount(String bucketsTableName, Bucket bucket); void decreaseBucketCount(String bucketsTableName, Bucket bucket); List<Bucket> getAllBuckets(String bucketsTableName, String objectId); Bucket getBucket(String bucketsTableName, Bucket bucket); Bucket getNextBucket(String bucketsTableName, String objectId); Bucket getNextBucket(String bucketsTableName, String objectId, Bucket bucket); void removeBucket(String bucketsTableName, Bucket bucket); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHandler(Session session); Bucket getCurrentBucket(String bucketsTableName, String objectId); void increaseBucketCount(String bucketsTableName, Bucket bucket); void decreaseBucketCount(String bucketsTableName, Bucket bucket); List<Bucket> getAllBuckets(String bucketsTableName, String objectId); Bucket getBucket(String bucketsTableName, Bucket bucket); Bucket getNextBucket(String bucketsTableName, String objectId); Bucket getNextBucket(String bucketsTableName, String objectId, Bucket bucket); void removeBucket(String bucketsTableName, Bucket bucket); static final String OBJECT_ID_COLUMN_NAME; static final String BUCKET_ID_COLUMN_NAME; static final String ROWS_COUNT_COLUMN_NAME; }
@Test public void shouldUpdateCounterForExistingBucket() { Bucket bucket = new Bucket("sampleObjectId", new com.eaio.uuid.UUID().toString(), 0); bucketsHandler.increaseBucketCount(BUCKETS_TABLE_NAME, bucket); bucketsHandler.increaseBucketCount(BUCKETS_TABLE_NAME, bucket); assertResults(bucket, 2); }
public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHandler(Session session); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHandler(Session session); Bucket getCurrentBucket(String bucketsTableName, String objectId); void increaseBucketCount(String bucketsTableName, Bucket bucket); void decreaseBucketCount(String bucketsTableName, Bucket bucket); List<Bucket> getAllBuckets(String bucketsTableName, String objectId); Bucket getBucket(String bucketsTableName, Bucket bucket); Bucket getNextBucket(String bucketsTableName, String objectId); Bucket getNextBucket(String bucketsTableName, String objectId, Bucket bucket); void removeBucket(String bucketsTableName, Bucket bucket); }
BucketsHandler { public void increaseBucketCount(String bucketsTableName, Bucket bucket) { String query = "UPDATE " + bucketsTableName + " SET rows_count = rows_count + 1 WHERE object_id = '" + bucket.getObjectId() + "' AND bucket_id = " + UUID.fromString(bucket.getBucketId()) + ";"; session.execute(query); } BucketsHandler(Session session); Bucket getCurrentBucket(String bucketsTableName, String objectId); void increaseBucketCount(String bucketsTableName, Bucket bucket); void decreaseBucketCount(String bucketsTableName, Bucket bucket); List<Bucket> getAllBuckets(String bucketsTableName, String objectId); Bucket getBucket(String bucketsTableName, Bucket bucket); Bucket getNextBucket(String bucketsTableName, String objectId); Bucket getNextBucket(String bucketsTableName, String objectId, Bucket bucket); void removeBucket(String bucketsTableName, Bucket bucket); static final String OBJECT_ID_COLUMN_NAME; static final String BUCKET_ID_COLUMN_NAME; static final String ROWS_COUNT_COLUMN_NAME; }
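The two bucket-counter tests above read as one small workflow: create a bucket with a freshly generated com.eaio UUID, bump its counter, and read it back as the current bucket. A hypothetical end-to-end usage of BucketsHandler along those lines is sketched below; the table name and object id are placeholders, and imports for the project classes Bucket and BucketsHandler are omitted because their packages are not shown in these rows.

public class BucketCounterSketch {

    // Creates a bucket row, increments its counter twice (rows_count ends at 2,
    // as asserted by shouldUpdateCounterForExistingBucket), then reads back the
    // current bucket for the object id.
    public static Bucket bumpTwice(BucketsHandler handler) {
        Bucket bucket = new Bucket("sampleObjectId", new com.eaio.uuid.UUID().toString(), 0);
        handler.increaseBucketCount("sample_buckets_table", bucket);
        handler.increaseBucketCount("sample_buckets_table", bucket);
        return handler.getCurrentBucket("sample_buckets_table", "sampleObjectId");
    }
}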
@Test public void testCountStatisticsSuccessfully() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] fileData = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, fileData, new HashMap<String, String>(), new Revision()); List<NodeStatistics> generated = new RecordStatisticsGenerator(new String(fileData)).getStatistics(); statisticsBolt.execute(anchorTuple, tuple); assertSuccess(1); assertDataStoring(generated); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final Logger LOGGER; }
@Test public void testAggregatedCountStatisticsSuccessfully() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); Tuple anchorTuple2 = mock(TupleImpl.class); byte[] fileData = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, fileData, new HashMap<String, String>(), new Revision()); List<NodeStatistics> generated = new RecordStatisticsGenerator(new String(fileData)).getStatistics(); byte[] fileData2 = Files.readAllBytes(Paths.get("src/test/resources/example2.xml")); StormTaskTuple tuple2 = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, fileData2, new HashMap<String, String>(), new Revision()); List<NodeStatistics> generated2 = new RecordStatisticsGenerator(new String(fileData2)).getStatistics(); statisticsBolt.execute(anchorTuple, tuple); statisticsBolt.execute(anchorTuple2, tuple2); assertSuccess(2); assertDataStoring(generated, generated2); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final Logger LOGGER; }
@Test public void testCountStatisticsFailed() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] fileData = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); fileData[0] = 'X'; StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, fileData, new HashMap<String, String>(), new Revision()); statisticsBolt.execute(anchorTuple, tuple); assertFailure(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
StatisticsBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { if (!statsAlreadyCalculated(stormTaskTuple)) { LOGGER.info("Calculating file statistics for {}", stormTaskTuple); countStatistics(stormTaskTuple); markRecordStatsAsCalculated(stormTaskTuple); } else { LOGGER.info("File stats will NOT be calculated because it was already done in the previous attempt"); } stormTaskTuple.setFileData((byte[]) null); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception e) { emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Statistics for the given file could not be prepared.", StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } StatisticsBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final Logger LOGGER; }
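The StatisticsBolt rows above all share the same execute logic: compute the record statistics only once, and skip the work on a retry when a marker says it was already done. A minimal standalone sketch of that compute-once pattern follows; the RecordStatsTracker class and its in-memory marker set are hypothetical stand-ins, not the project's Cassandra-backed API.

// Minimal standalone sketch of the compute-once pattern in StatisticsBolt.execute.
// RecordStatsTracker and its in-memory marker set are hypothetical stand-ins for
// the Cassandra-backed "stats already calculated" check.
import java.util.HashSet;
import java.util.Set;

public class RecordStatsTracker {

    private final Set<String> calculatedRecords = new HashSet<>();

    public void process(String recordId) {
        if (!calculatedRecords.contains(recordId)) {
            System.out.println("Calculating file statistics for " + recordId);
            // countStatistics(recordId) would run here
            calculatedRecords.add(recordId); // markRecordStatsAsCalculated equivalent
        } else {
            System.out.println("File stats will NOT be calculated because it was already done in the previous attempt");
        }
    }

    public static void main(String[] args) {
        RecordStatsTracker tracker = new RecordStatsTracker();
        tracker.process("record-1"); // calculates and marks the record
        tracker.process("record-1"); // skipped on the retry
    }
}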
@Test public void validateEdmInternalFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParameters("edm-internal", null), new Revision()); validationBolt.execute(anchorTuple, tuple); assertSuccessfulValidation(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final Logger LOGGER; }
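The validateEdmInternalFile test above feeds an XML record into ValidationBolt and asserts a successful validation. As a point of reference, a minimal, self-contained XSD validation sketch using the standard javax.xml.validation API is shown below; the schema and record file names are placeholders borrowed from the test resources, and this is not the bolt's actual validation service.

// Self-contained XSD validation sketch using the standard javax.xml.validation API.
// The schema and record file names are placeholders borrowed from the test resources.
import javax.xml.XMLConstants;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import java.io.File;

public class EdmXsdValidationSketch {

    public static void main(String[] args) throws Exception {
        SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        Schema schema = factory.newSchema(new File("EDM-INTERNAL.xsd")); // placeholder schema location
        Validator validator = schema.newValidator();
        try {
            validator.validate(new StreamSource(new File("Item_35834473_test.xml")));
            System.out.println("Validation passed");
        } catch (org.xml.sax.SAXException e) {
            // A failing record is what makes the bolt emit an error notification instead.
            System.out.println("Validation failed: " + e.getMessage());
        }
    }
}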
@Test public void validateEdmInternalFileWithProvidedRootLocation() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParameters("edm-internal", "EDM-INTERNAL.xsd"), new Revision()); validationBolt.execute(anchorTuple, tuple); assertSuccessfulValidation(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final Logger LOGGER; }
@Test public void validateEdmExternalFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParameters("edm-external", null), new Revision()); validationBolt.execute(anchorTuple, tuple); assertSuccessfulValidation(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final Logger LOGGER; }
@Test public void validateEdmExternalOutOfOrderFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/edmExternalWithOutOfOrderElements.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParameters("edm-external", null), new Revision()); validationBolt.execute(anchorTuple, tuple); assertSuccessfulValidation(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final Logger LOGGER; }
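The validateEdmExternalOutOfOrderFile test relies on the reorderFileContent step to fix element order before schema validation. How that reordering is configured is not shown here, so the following sketch only illustrates the general idea under an assumed fixed element order: collect the root's child elements and re-append them in the required sequence. The ReorderSketch class and its ORDER list are made up for illustration and are not the real EDM sequence.

// Hedged sketch of reordering out-of-order child elements before XSD validation.
// The ORDER list is a made-up example; a schema <xs:sequence> dictates the real order.
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.io.ByteArrayInputStream;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class ReorderSketch {

    private static final List<String> ORDER = List.of("title", "creator", "subject");

    public static void main(String[] args) throws Exception {
        String xml = "<record><subject>s</subject><title>t</title><creator>c</creator></record>";
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
        Element root = doc.getDocumentElement();

        // Collect the child elements, then re-append them in the required order;
        // appendChild moves an already-attached node, so no explicit removal is needed.
        List<Element> children = new ArrayList<>();
        NodeList nodes = root.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            if (nodes.item(i).getNodeType() == Node.ELEMENT_NODE) {
                children.add((Element) nodes.item(i));
            }
        }
        children.sort(Comparator.comparingInt(e -> ORDER.indexOf(e.getNodeName())));
        for (Element child : children) {
            root.appendChild(child);
        }

        Transformer transformer = TransformerFactory.newInstance().newTransformer();
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        StringWriter out = new StringWriter();
        transformer.transform(new DOMSource(doc), new StreamResult(out));
        System.out.println(out); // <record><title>t</title><creator>c</creator><subject>s</subject></record>
    }
}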
@Test public void sendErrorNotificationWhenTheValidationFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParameters("edm-external", null), new Revision()); validationBolt.execute(anchorTuple, tuple); assertFailedValidation(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ValidationBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { reorderFileContent(stormTaskTuple); validateFileAndEmit(anchorTuple, stormTaskTuple); } catch (Exception e) { LOGGER.error("Validation Bolt error: {}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while validation. The full error :" + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ValidationBolt(Properties properties); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final Logger LOGGER; }
@Test(expected = Exception.class) public void shouldRetry5TimesBeforeFailingWhileRemovingErrorReports() { doThrow(Exception.class).when(taskErrorDAO).removeErrors(eq(TASK_ID)); removerImpl.removeErrorReports(TASK_ID); verify(taskErrorDAO, times(6)).removeErrors((eq(TASK_ID))); }
@Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
RemoverImpl implements Remover { @Override public void removeErrorReports(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { taskErrorDAO.removeErrors(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the error reports. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the error reports."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
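removeErrorReports wraps the DAO call in a bounded retry loop: retry on failure, wait between attempts, and rethrow once the budget is spent, which is what the times(6) verification in the test checks (the initial call plus, apparently, five retries). A standalone sketch of the same loop is given below; the BoundedRetry helper and its Runnable signature are hypothetical, not the Remover API.

// Standalone sketch of the bounded-retry loop in RemoverImpl.removeErrorReports.
// The Runnable-based helper is hypothetical; the decrement-and-rethrow logic mirrors the method above.
public class BoundedRetry {

    static final int DEFAULT_RETRIES = 5; // assumed value; times(6) = initial call + 5 retries

    public static void runWithRetries(Runnable operation) {
        int retries = DEFAULT_RETRIES;
        while (true) {
            try {
                operation.run();
                break; // success, stop retrying
            } catch (RuntimeException e) {
                if (retries-- > 0) {
                    System.out.println("Operation failed. Retries left: " + retries);
                    sleepQuietly(100); // stand-in for waitForTheNextCall()
                } else {
                    throw e; // retry budget exhausted, propagate to the caller
                }
            }
        }
    }

    private static void sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
        }
    }

    public static void main(String[] args) {
        runWithRetries(() -> System.out.println("removing error reports")); // succeeds on the first attempt
    }
}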
@Test public void nodeContentsSizeShouldBeSmallerThanMaximumSize() throws Exception { String fileContent = readFile("src/test/resources/BigContent.xml"); RecordStatisticsGenerator xmlParser = new RecordStatisticsGenerator(fileContent); List<NodeStatistics> nodeModelList = xmlParser.getStatistics(); for (NodeStatistics nodeModel : nodeModelList) { assertTrue(nodeModel.getValue().length() <= MAX_SIZE); } }
public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayList<>(nodeStatistics.values()); }
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayList<>(nodeStatistics.values()); } }
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayList<>(nodeStatistics.values()); } RecordStatisticsGenerator(String fileContent); }
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayList<>(nodeStatistics.values()); } RecordStatisticsGenerator(String fileContent); List<NodeStatistics> getStatistics(); }
RecordStatisticsGenerator { public List<NodeStatistics> getStatistics() throws SAXException, IOException, ParserConfigurationException { Document doc = getParsedDocument(); doc.getDocumentElement().normalize(); Node root = doc.getDocumentElement(); addRootToNodeList(root); prepareNodeStatistics(root); return new ArrayList<>(nodeStatistics.values()); } RecordStatisticsGenerator(String fileContent); List<NodeStatistics> getStatistics(); }
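getStatistics parses the record into a DOM, normalizes it, and walks the tree starting from the document element, while the test additionally checks that node values stay within MAX_SIZE. The sketch below shows that kind of DOM walk with a simple element-count map and a value cap; the NodeStatisticsSketch class and the assumed MAX_SIZE value are illustrative stand-ins for the real NodeStatistics model.

// Standalone sketch of the DOM walk behind RecordStatisticsGenerator.getStatistics.
// The element-count map, sample-value map and MAX_SIZE value are illustrative stand-ins.
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class NodeStatisticsSketch {

    private static final int MAX_SIZE = 250; // assumed cap, mirroring the MAX_SIZE assertion in the test

    public static void main(String[] args) throws Exception {
        String fileContent = "<root><a>x</a><a>y</a><b>z</b></root>";
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        Document doc = factory.newDocumentBuilder()
                .parse(new ByteArrayInputStream(fileContent.getBytes(StandardCharsets.UTF_8)));
        doc.getDocumentElement().normalize();

        Map<String, Integer> occurrences = new HashMap<>();
        Map<String, String> sampleValues = new HashMap<>();
        collect(doc.getDocumentElement(), occurrences, sampleValues);
        System.out.println(occurrences); // element counts: a=2, b=1, root=1 (map order may vary)
    }

    private static void collect(Node node, Map<String, Integer> occurrences, Map<String, String> sampleValues) {
        if (node.getNodeType() == Node.ELEMENT_NODE) {
            occurrences.merge(node.getNodeName(), 1, Integer::sum);
            String value = node.getTextContent();
            if (value.length() > MAX_SIZE) {
                value = value.substring(0, MAX_SIZE); // keep stored node values within the maximum size
            }
            sampleValues.putIfAbsent(node.getNodeName(), value);
        }
        NodeList children = node.getChildNodes();
        for (int i = 0; i < children.getLength(); i++) {
            collect(children.item(i), occurrences, sampleValues);
        }
    }
}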
@Test public void shouldEnrichTheFileSuccessfullyAndSendItToTheNextBolt() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, "{\"textResourceMetadata\":{\"containsText\":false,\"resolution\":10,\"mimeType\":\"text/xml\",\"resourceUrl\":\"http: stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, String.valueOf(1)); assertEquals(4, stormTaskTuple.getParameters().size()); edmEnrichmentBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, times(1)).emit(eq(anchorTuple), captor.capture()); Values values = captor.getValue(); Map<String, String> parameters = (Map) values.get(4); assertNotNull(parameters); assertEquals(6, parameters.size()); assertNull(parameters.get(PluginParameterKeys.RESOURCE_METADATA)); assertEquals("sourceCloudId", parameters.get(PluginParameterKeys.CLOUD_ID)); assertEquals("sourceRepresentationName", parameters.get(PluginParameterKeys.REPRESENTATION_NAME)); assertEquals("sourceVersion", parameters.get(PluginParameterKeys.REPRESENTATION_VERSION)); } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); 
@Override void prepare(); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); 
@Override void prepare(); static final String NO_RESOURCES_DETAILED_MESSAGE; }
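EDMEnrichmentBolt.execute keeps a per-file entry in a cache, merges each resource's metadata into it, and only emits (and acks the anchored source tuples) once the count reaches RESOURCE_LINKS_COUNT; the multi-batch test below drives exactly that with ten resources. A minimal sketch of the accumulate-then-flush pattern follows; the classes in it are hypothetical and do not reflect the bolt's real cache or RDF types.

// Minimal sketch of the "accumulate until the last resource arrives" pattern in
// EDMEnrichmentBolt.execute. TempEnrichedFile and the accept(...) signature here
// are hypothetical illustrations, not the bolt's real cache or RDF types.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class EnrichmentAccumulatorSketch {

    static class TempEnrichedFile {
        final List<String> resourceMetadata = new ArrayList<>();
        int count;

        boolean isTheLastResource(int expectedResourceCount) {
            return count >= expectedResourceCount;
        }
    }

    private final Map<String, TempEnrichedFile> cache = new HashMap<>();

    /** Returns the merged metadata when the file is complete, or null while still accumulating. */
    public List<String> accept(String fileUrl, String metadata, int expectedResourceCount) {
        TempEnrichedFile entry = cache.computeIfAbsent(fileUrl, k -> new TempEnrichedFile());
        if (metadata != null) {
            entry.resourceMetadata.add(metadata); // stand-in for the enrichRdf(...) merge
        }
        entry.count++; // increaseCount()
        if (entry.isTheLastResource(expectedResourceCount)) {
            cache.remove(fileUrl); // the bolt emits downstream and acks all anchored tuples here
            return entry.resourceMetadata;
        }
        return null; // keep waiting for the remaining resources
    }

    public static void main(String[] args) {
        EnrichmentAccumulatorSketch bolt = new EnrichmentAccumulatorSketch();
        for (int i = 1; i <= 3; i++) {
            List<String> result = bolt.accept("file-1", "metadata-" + i, 3);
            System.out.println(i + " -> " + (result == null ? "still accumulating" : result));
        }
    }
}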
@Test public void shouldEnrichTheFileSuccessfullyOnMultipleBatchesAndSendItToTheNextBolt() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, "{\"textResourceMetadata\":{\"containsText\":false,\"resolution\":10,\"mimeType\":\"text/xml\",\"resourceUrl\":\"http: int resourceLinksCount = 10; stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, String.valueOf(resourceLinksCount)); assertEquals(4, stormTaskTuple.getParameters().size()); for (int i = 1; i <= resourceLinksCount; i++) { edmEnrichmentBolt.execute(anchorTuple, stormTaskTuple); if (i < resourceLinksCount) assertEquals(i, edmEnrichmentBolt.cache.get(FILE_URL).getCount()); } verify(outputCollector, times(1)).emit(eq(anchorTuple), captor.capture()); Values values = captor.getValue(); Map<String, String> parameters = (Map) values.get(4); assertNotNull(parameters); assertEquals(6, parameters.size()); assertNull(parameters.get(PluginParameterKeys.RESOURCE_METADATA)); assertEquals("sourceCloudId", parameters.get(PluginParameterKeys.CLOUD_ID)); assertEquals("sourceRepresentationName", parameters.get(PluginParameterKeys.REPRESENTATION_NAME)); assertEquals("sourceVersion", parameters.get(PluginParameterKeys.REPRESENTATION_VERSION)); } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final String NO_RESOURCES_DETAILED_MESSAGE; }
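All of the tests in this section verify the bolt's output the same way: they capture the Values emitted to a mocked OutputCollector and read the parameters map that the serialized StormTaskTuple carries at index 4. A minimal, hedged sketch of that captor wiring follows; the Mockito and Storm types are real, while the helper class and method name are purely illustrative and the index-4 convention is taken from the tests below.

import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.verify;

import java.util.Map;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.mockito.ArgumentCaptor;

// Hypothetical test helper: pulls the parameters map out of the tuple emitted by a bolt under test.
final class EmittedParametersSketch {
    @SuppressWarnings("unchecked")
    static Map<String, String> capturedParameters(OutputCollector outputCollector, Tuple anchorTuple) {
        ArgumentCaptor<Values> captor = ArgumentCaptor.forClass(Values.class);
        // The bolts emit (anchorTuple, stormTaskTuple.toStormTuple()); capture the second argument.
        verify(outputCollector).emit(eq(anchorTuple), captor.capture());
        // In these tests the serialized StormTaskTuple keeps its parameters map at index 4.
        return (Map<String, String>) captor.getValue().get(4);
    }

    private EmittedParametersSketch() { }
}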
@Test public void shouldForwardTheTupleWhenNoResourceLinkFound() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); edmEnrichmentBolt.execute(anchorTuple, stormTaskTuple); int expectedParametersSize = 2; Map<String, String> initialTupleParameters = stormTaskTuple.getParameters(); assertEquals(expectedParametersSize, initialTupleParameters.size()); verify(outputCollector, Mockito.times(1)).emit(eq(anchorTuple), captor.capture()); Values value = captor.getValue(); Map<String, String> parametersAfterExecution = (Map) value.get(4); assertNotNull(parametersAfterExecution); assertEquals(expectedParametersSize, parametersAfterExecution.size()); for (String key : parametersAfterExecution.keySet()) { assertTrue(initialTupleParameters.keySet().contains(key)); assertEquals(initialTupleParameters.get(key), parametersAfterExecution.get(key)); } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final String NO_RESOURCES_DETAILED_MESSAGE; }
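The EDMEnrichmentBolt code above accumulates partial results per file in a TempEnrichedFile cache entry whose source is not included here. The following is a minimal sketch of what such a holder could look like, inferred only from the calls the bolt makes on it; the field names, the long task id, the assumed EnrichedRdf package, and the count-based completion check are assumptions, not the project's actual implementation.

import java.util.ArrayList;
import java.util.List;
import org.apache.storm.tuple.Tuple;
import eu.europeana.metis.mediaprocessing.model.EnrichedRdf; // package assumed

// Hypothetical per-file accumulator used by EDMEnrichmentBolt (names inferred from the calls above).
class TempEnrichedFile {
    private long taskId;
    private EnrichedRdf enrichedRdf;
    private String exceptions = "";
    private int count; // how many resource tuples have been merged into this file so far
    private final List<Tuple> sourceTuples = new ArrayList<>();

    long getTaskId() { return taskId; }
    void setTaskId(long taskId) { this.taskId = taskId; }
    EnrichedRdf getEnrichedRdf() { return enrichedRdf; }
    void setEnrichedRdf(EnrichedRdf enrichedRdf) { this.enrichedRdf = enrichedRdf; }
    String getExceptions() { return exceptions; }
    void setExceptions(String exceptions) { this.exceptions = exceptions; }
    void addSourceTuple(Tuple sourceTuple) { sourceTuples.add(sourceTuple); }
    List<Tuple> getSourceTuples() { return sourceTuples; }
    void increaseCount() { count++; }
    // The record is complete once every declared resource link has been processed.
    boolean isTheLastResource(int resourceLinksCount) { return count >= resourceLinksCount; }
}

Under this reading, ackAllSourceTuplesForFile would simply iterate getSourceTuples() and ack each anchor once the merged file has been emitted, which is why the bolt only acks immediately in the no-resources branch.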
@Test public void shouldLogTheExceptionAndSendItAsParameterToTheNextBolt() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); String brokenMetaData = "{\"textResourceMetadata\":{\"containsTe/xml\",\"resourceUrl\":\"RESOURCE_URL\",\"contentSize\":100,\"thumbnailTargetNames\":[\"TargetName1\",\"TargetName0\",\"TargetName2\"]}}"; stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, brokenMetaData); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, String.valueOf(1)); assertEquals(4, stormTaskTuple.getParameters().size()); edmEnrichmentBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, times(1)).emit(eq(anchorTuple), captor.capture()); Values values = captor.getValue(); Map<String, String> parameters = (Map) values.get(4); assertNotNull(parameters); assertEquals(8, parameters.size()); assertNotNull(parameters.get(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE)); assertNotNull(parameters.get(PluginParameterKeys.UNIFIED_ERROR_MESSAGE)); assertNull(parameters.get(PluginParameterKeys.RESOURCE_METADATA)); assertEquals("sourceCloudId", parameters.get(PluginParameterKeys.CLOUD_ID)); assertEquals("sourceRepresentationName", parameters.get(PluginParameterKeys.REPRESENTATION_NAME)); assertEquals("sourceVersion", parameters.get(PluginParameterKeys.REPRESENTATION_VERSION)); assertNotNull(parameters.get(PluginParameterKeys.UNIFIED_ERROR_MESSAGE)); assertNotNull(MEDIA_RESOURCE_EXCEPTION, parameters.get(PluginParameterKeys.UNIFIED_ERROR_MESSAGE)); } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
EDMEnrichmentBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { LOGGER.warn(NO_RESOURCES_DETAILED_MESSAGE); try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { EnrichedRdf enrichedRdf = deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream)); prepareStormTaskTuple(stormTaskTuple, enrichedRdf, NO_RESOURCES_DETAILED_MESSAGE); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (IOException | RdfDeserializationException | RdfSerializationException | MCSException ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } else { final String file = stormTaskTuple.getFileUrl(); TempEnrichedFile tempEnrichedFile = cache.get(file); try { if ((tempEnrichedFile == null) || (tempEnrichedFile.getTaskId() != stormTaskTuple.getTaskId())) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { tempEnrichedFile = new TempEnrichedFile(); tempEnrichedFile.setTaskId(stormTaskTuple.getTaskId()); tempEnrichedFile.setEnrichedRdf(deserializer.getRdfForResourceEnriching(IOUtils.toByteArray(stream))); } } tempEnrichedFile.addSourceTuple(anchorTuple); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA) != null) { EnrichedRdf enrichedRdf = enrichRdf(tempEnrichedFile.getEnrichedRdf(), stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_METADATA)); tempEnrichedFile.setEnrichedRdf(enrichedRdf); } String cachedErrorMessage = tempEnrichedFile.getExceptions(); cachedErrorMessage = buildErrorMessage(stormTaskTuple.getParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), cachedErrorMessage); tempEnrichedFile.setExceptions(cachedErrorMessage); } catch (Exception e) { LOGGER.error("problem while enrichment ", e); String currentException = tempEnrichedFile.getExceptions(); String exceptionMessage = "Exception while enriching the original edm file with resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + " because of: " + ExceptionUtils.getStackTrace(e); if (currentException.isEmpty()) tempEnrichedFile.setExceptions(exceptionMessage); else tempEnrichedFile.setExceptions(currentException + "," + exceptionMessage); } finally { tempEnrichedFile.increaseCount(); if (tempEnrichedFile.isTheLastResource(Integer.parseInt(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT)))) { try { LOGGER.info("The file {} was fully enriched and will be send to the next bolt", file); prepareStormTaskTuple(stormTaskTuple, tempEnrichedFile); cache.remove(file); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); } catch (Exception ex) { LOGGER.error("Error while serializing the enriched file: ", ex); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), ex.getMessage(), "Error while serializing the enriched file: " + ExceptionUtils.getStackTrace(ex), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } ackAllSourceTuplesForFile(tempEnrichedFile); } else { cache.put(file, tempEnrichedFile); } } } } EDMEnrichmentBolt(String mcsURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); static final String NO_RESOURCES_DETAILED_MESSAGE; }
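shouldLogTheExceptionAndSendItAsParameterToTheNextBolt checks that EXCEPTION_ERROR_MESSAGE and UNIFIED_ERROR_MESSAGE reach the next bolt. The bolt feeds incoming errors through a buildErrorMessage helper that is not shown above; a plausible sketch, assuming it simply comma-joins the new message onto whatever is already cached (mirroring what the catch block does by hand), is given here. The real helper may format things differently.

// Hypothetical sketch of the error-accumulation helper used by EDMEnrichmentBolt (assumed behaviour).
final class ErrorMessageSketch {
    // Comma-joins the new per-resource error onto the cached one.
    static String buildErrorMessage(String newErrorMessage, String cachedErrorMessage) {
        if (newErrorMessage == null || newErrorMessage.isEmpty()) {
            return cachedErrorMessage == null ? "" : cachedErrorMessage; // nothing new to record
        }
        if (cachedErrorMessage == null || cachedErrorMessage.isEmpty()) {
            return newErrorMessage;
        }
        return cachedErrorMessage + "," + newErrorMessage;
    }

    private ErrorMessageSketch() { }
}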
@Test public void shouldSuccessfullyProcessTheResource() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":\"http: String resourceName = "RESOURCE_URL"; int thumbnailCount = 3; List<Thumbnail> thumbnailList = getThumbnails(thumbnailCount); AbstractResourceMetadata resourceMetadata = new TextResourceMetadata("text/xml", resourceName, 100L, false, 10, thumbnailList); ResourceExtractionResult resourceExtractionResult = new ResourceExtractionResultImpl(resourceMetadata, thumbnailList); when(mediaExtractor.performMediaExtraction(any(RdfResourceEntry.class), anyBoolean())).thenReturn(resourceExtractionResult); when(amazonClient.putObject(anyString(), any(InputStream.class), nullable(ObjectMetadata.class))).thenReturn(new PutObjectResult()); when(taskStatusChecker.hasKillFlag(eq(TASK_ID))).thenReturn(false); resourceProcessingBolt.execute(anchorTuple, stormTaskTuple); verify(amazonClient, Mockito.times(thumbnailCount)).putObject(anyString(), any(InputStream.class), any(ObjectMetadata.class)); verify(outputCollector, Mockito.times(1)).emit(eq(anchorTuple), captor.capture()); Values value = captor.getValue(); Map<String, String> parameters = (Map) value.get(4); assertNotNull(parameters); assertEquals(4, parameters.size()); assertNull(parameters.get(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
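ResourceProcessingBolt passes the extracted metadata downstream by serialising it with Gson into the RESOURCE_METADATA tuple parameter, which EDMEnrichmentBolt later consumes when enriching the RDF. A self-contained sketch of that handoff follows; ResourceMetadataSketch is a hypothetical stand-in for the real metis AbstractResourceMetadata hierarchy seen in the tests, and only standard Gson calls are used.

import com.google.gson.Gson;

// Hypothetical stand-in for the extracted resource metadata carried between the two bolts.
class ResourceMetadataSketch {
    String mimeType;
    String resourceUrl;
    long contentSize;
}

class MetadataHandoffSketch {
    public static void main(String[] args) {
        Gson gson = new Gson();

        // Producer side (ResourceProcessingBolt): serialise the metadata into a tuple parameter.
        ResourceMetadataSketch metadata = new ResourceMetadataSketch();
        metadata.mimeType = "text/xml";
        metadata.resourceUrl = "RESOURCE_URL";
        metadata.contentSize = 100L;
        String resourceMetadataParam = gson.toJson(metadata);

        // Consumer side (EDMEnrichmentBolt): read the parameter back before enriching the RDF.
        ResourceMetadataSketch restored = gson.fromJson(resourceMetadataParam, ResourceMetadataSketch.class);
        System.out.println(restored.resourceUrl + " <- " + resourceMetadataParam);
    }
}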
@Test public void shouldDropTheTaskAndStopProcessing() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":\"http: String resourceName = "RESOURCE_URL"; int thumbnailCount = 3; List<Thumbnail> thumbnailList = getThumbnails(thumbnailCount); AbstractResourceMetadata resourceMetadata = new TextResourceMetadata("text/xml", resourceName, 100L, false, 10, thumbnailList); ResourceExtractionResult resourceExtractionResult = new ResourceExtractionResultImpl(resourceMetadata, thumbnailList); when(mediaExtractor.performMediaExtraction(any(RdfResourceEntry.class), anyBoolean())).thenReturn(resourceExtractionResult); when(amazonClient.putObject(anyString(), any(InputStream.class), isNull(ObjectMetadata.class))).thenReturn(new PutObjectResult()); when(taskStatusChecker.hasKillFlag(eq(TASK_ID))).thenReturn(false).thenReturn(true); resourceProcessingBolt.execute(anchorTuple, stormTaskTuple); verify(amazonClient, Mockito.times(1)).putObject(anyString(), any(InputStream.class), any(ObjectMetadata.class)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldFormulateTheAggregateExceptionsWhenSavingToAmazonFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":\"http: String resourceName = "RESOURCE_URL"; int thumbNailCount = 3; List<Thumbnail> thumbnailList = getThumbnails(thumbNailCount); AbstractResourceMetadata resourceMetadata = new TextResourceMetadata("text/xml", resourceName, 100L, false, 10, thumbnailList); ResourceExtractionResult resourceExtractionResult = new ResourceExtractionResultImpl(resourceMetadata, thumbnailList); String errorMessage = "The error was thrown because of something"; when(mediaExtractor.performMediaExtraction(any(RdfResourceEntry.class), anyBoolean())).thenReturn(resourceExtractionResult); doThrow(new AmazonServiceException(errorMessage)).when(amazonClient).putObject( anyString(), any(InputStream.class), any(ObjectMetadata.class)); resourceProcessingBolt.execute(anchorTuple, stormTaskTuple); verify(amazonClient, Mockito.times(3)).putObject(anyString(), any(InputStream.class), any(ObjectMetadata.class)); verify(outputCollector, Mockito.times(1)).emit(eq(anchorTuple), captor.capture()); Values value = captor.getValue(); Map<String, String> parameters = (Map) value.get(4); assertNotNull(parameters); assertEquals(6, parameters.size()); assertNotNull(parameters.get(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE)); assertNotNull(parameters.get(PluginParameterKeys.UNIFIED_ERROR_MESSAGE)); assertEquals(thumbNailCount, StringUtils.countMatches(parameters.get(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE), errorMessage)); assertNull(parameters.get(PluginParameterKeys.RESOURCE_LINK_KEY)); assertNotNull(MEDIA_RESOURCE_EXCEPTION, parameters.get(PluginParameterKeys.UNIFIED_ERROR_MESSAGE)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldSendExceptionsWhenProcessingFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT, Integer.toString(5)); stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_LINK_KEY, "{\"resourceUrl\":\"http: doThrow(MediaExtractionException.class).when(mediaExtractor).performMediaExtraction(any(RdfResourceEntry.class), anyBoolean()); resourceProcessingBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, Mockito.times(1)).emit(eq(anchorTuple), captor.capture()); verify(amazonClient, Mockito.times(0)).putObject(anyString(), any(InputStream.class), isNull(ObjectMetadata.class)); Values value = captor.getValue(); Map<String, String> parameters = (Map) value.get(4); assertNotNull(parameters); assertEquals(5, parameters.size()); assertNotNull(parameters.get(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE)); assertNotNull(parameters.get(PluginParameterKeys.UNIFIED_ERROR_MESSAGE)); assertNull(parameters.get(PluginParameterKeys.RESOURCE_METADATA)); assertNull(parameters.get(PluginParameterKeys.RESOURCE_LINK_KEY)); assertNotNull(MEDIA_RESOURCE_EXCEPTION, parameters.get(PluginParameterKeys.UNIFIED_ERROR_MESSAGE)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldForwardTheTupleWhenNoResourceLinkFound() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); resourceProcessingBolt.execute(anchorTuple, stormTaskTuple); int expectedParametersSize = 2; assertEquals(expectedParametersSize, stormTaskTuple.getParameters().size()); verify(outputCollector, Mockito.times(1)).emit(eq(anchorTuple), captor.capture()); Values value = captor.getValue(); Map<String, String> parameters = (Map) value.get(4); assertNotNull(parameters); assertEquals(expectedParametersSize, parameters.size()); assertNull(parameters.get(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE)); assertNull(parameters.get(PluginParameterKeys.RESOURCE_METADATA)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ResourceProcessingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { LOGGER.info("Starting resource processing"); long processingStartTime = new Date().getTime(); StringBuilder exception = new StringBuilder(); if (stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINKS_COUNT) == null) { outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } else { try { RdfResourceEntry rdfResourceEntry = gson.fromJson(stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_LINK_KEY), RdfResourceEntry.class); ResourceExtractionResult resourceExtractionResult = mediaExtractor.performMediaExtraction(rdfResourceEntry, Boolean.parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.MAIN_THUMBNAIL_AVAILABLE))); if (resourceExtractionResult != null) { if (resourceExtractionResult.getMetadata() != null) stormTaskTuple.addParameter(PluginParameterKeys.RESOURCE_METADATA, gson.toJson(resourceExtractionResult.getMetadata())); storeThumbnails(stormTaskTuple, exception, resourceExtractionResult); } LOGGER.info("Resource processing finished in: {}ms", String.valueOf(Calendar.getInstance().getTimeInMillis() - processingStartTime)); } catch (Exception e) { LOGGER.error("Exception while processing the resource {}. The full error is:{} ", stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL), ExceptionUtils.getStackTrace(e)); buildErrorMessage(exception, "Exception while processing the resource: " + stormTaskTuple.getParameter(PluginParameterKeys.RESOURCE_URL) + ". The full error is: " + e.getMessage() + " because of: " + e.getCause()); } finally { stormTaskTuple.getParameters().remove(PluginParameterKeys.RESOURCE_LINK_KEY); if (exception.length() > 0) { stormTaskTuple.addParameter(PluginParameterKeys.EXCEPTION_ERROR_MESSAGE, exception.toString()); stormTaskTuple.addParameter(PluginParameterKeys.UNIFIED_ERROR_MESSAGE, MEDIA_RESOURCE_EXCEPTION); } outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } } LOGGER.info("Resource processing finished in: {}ms", Calendar.getInstance().getTimeInMillis() - processingStartTime); } ResourceProcessingBolt(AmazonClient amazonClient); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldSuccessfullyRemoveStatistics() { doNothing().when(cassandraNodeStatisticsDAO).removeStatistics(eq(TASK_ID)); removerImpl.removeStatistics(TASK_ID); verify(cassandraNodeStatisticsDAO, times(1)).removeStatistics(eq(TASK_ID)); }
@Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
@Test public void deactivateShouldClearTheTaskQueue() throws Exception { final int taskCount = 10; for (int i = 0; i < taskCount; i++) { httpKafkaSpout.taskDownloader.taskQueue.put(new DpsTask()); } assertFalse(httpKafkaSpout.taskDownloader.taskQueue.isEmpty()); httpKafkaSpout.deactivate(); assertTrue(httpKafkaSpout.taskDownloader.taskQueue.isEmpty()); verify(cassandraTaskInfoDAO, atLeast(taskCount)).setTaskDropped(anyLong(), anyString()); }
@Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); }
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } }
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } HttpKafkaSpout(KafkaSpoutConfig spoutConf); HttpKafkaSpout(KafkaSpoutConfig spoutConf, String hosts, int port, String keyspaceName, String userName, String password); }
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } HttpKafkaSpout(KafkaSpoutConfig spoutConf); HttpKafkaSpout(KafkaSpoutConfig spoutConf, String hosts, int port, String keyspaceName, String userName, String password); @Override void open(Map conf, TopologyContext context, SpoutOutputCollector collector); @Override void nextTuple(); @Override void declareOutputFields(OutputFieldsDeclarer declarer); @Override void deactivate(); }
HttpKafkaSpout extends CustomKafkaSpout { @Override public void deactivate() { LOGGER.info("Deactivate method was executed"); deactivateWaitingTasks(); deactivateCurrentTask(); LOGGER.info("Deactivate method was finished"); } HttpKafkaSpout(KafkaSpoutConfig spoutConf); HttpKafkaSpout(KafkaSpoutConfig spoutConf, String hosts, int port, String keyspaceName, String userName, String password); @Override void open(Map conf, TopologyContext context, SpoutOutputCollector collector); @Override void nextTuple(); @Override void declareOutputFields(OutputFieldsDeclarer declarer); @Override void deactivate(); }
@Test public void shouldUnpackTheZipFilesRecursively() throws CompressionExtensionNotRecognizedException, IOException { zipUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME + ZIP_EXTENSION, DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + DEFAULT_DESTINATION_NAME); assertNotNull(files); assertEquals(XML_FILES_COUNT, files.size()); }
public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } void unpackFile(final String compressedFilePath, final String destinationFolder); }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } void unpackFile(final String compressedFilePath, final String destinationFolder); }
@Test public void shouldUnpackTheZipFilesWithNestedFoldersRecursively() throws CompressionExtensionNotRecognizedException, IOException { zipUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME2 + ZIP_EXTENSION, DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + DEFAULT_DESTINATION_NAME); assertNotNull(files); assertEquals(XML_FILES_COUNT,files.size()); }
public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } void unpackFile(final String compressedFilePath, final String destinationFolder); }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } void unpackFile(final String compressedFilePath, final String destinationFolder); }
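Usage sketch (added for illustration, not part of the dataset rows): the tests above drive ZipUnpackingService against fixture archives prepared elsewhere. Assuming ZipUnpackingService and CompressionExtensionNotRecognizedException are available in the same package, a minimal standalone call could look like this; the paths are hypothetical.

```java
import java.io.IOException;

// Minimal sketch, assuming ZipUnpackingService and
// CompressionExtensionNotRecognizedException are on the classpath (same package).
public class ZipUnpackExampleSketch {

    public static void main(String[] args) throws IOException, CompressionExtensionNotRecognizedException {
        String archive = "/tmp/records.zip";    // hypothetical input archive
        String destination = "/tmp/unpacked/";  // hypothetical destination folder

        // Unpacks the top-level zip; nested compressed entries collected by the
        // NameMapper callback are unpacked recursively via UnpackingServiceFactory.
        new ZipUnpackingService().unpackFile(archive, destination);
    }
}
```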
@Test public void shouldUnpackTheZipFilesWithNestedMixedCompressedFiles() throws CompressionExtensionNotRecognizedException, IOException { zipUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME3 + ZIP_EXTENSION, DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + DEFAULT_DESTINATION_NAME); assertNotNull(files); assertEquals(XML_FILES_COUNT,files.size()); }
public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } void unpackFile(final String compressedFilePath, final String destinationFolder); }
ZipUnpackingService implements FileUnpackingService { public void unpackFile(final String compressedFilePath, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { final List<String> zipFiles = new ArrayList<>(); ZipUtil.unpack(new File(compressedFilePath), new File(destinationFolder), new NameMapper() { public String map(String name) { if (CompressionFileExtension.contains(FilenameUtils.getExtension(name))) { String compressedFilePath = destinationFolder + name; zipFiles.add(compressedFilePath); } return name; } }); for (String nestedCompressedFile : zipFiles) { String extension = FilenameUtils.getExtension(nestedCompressedFile); UnpackingServiceFactory.createUnpackingService(extension).unpackFile(nestedCompressedFile, FilenameUtils.removeExtension(nestedCompressedFile) + File.separator); } } void unpackFile(final String compressedFilePath, final String destinationFolder); }
@Test public void shouldUnpackTheTarGzFilesRecursively() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME + ".tar.gz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME); assertNotNull(files); assertEquals(XML_FILES_COUNT,files.size()); }
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); static final String TAR; }
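The GzUnpackingService rows above delegate to a three-argument unpackFile overload that is not included in this excerpt. As a standalone illustration only (the project may implement it differently), a .tar.gz extraction using Apache commons-compress could be sketched as follows:

```java
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// Illustrative sketch only; not the project's implementation of the hidden overload.
public class TarGzExtractSketch {

    public static void extract(String tarGzPath, String destinationFolder) throws IOException {
        try (InputStream fileIn = Files.newInputStream(Paths.get(tarGzPath));
             GzipCompressorInputStream gzipIn = new GzipCompressorInputStream(fileIn);
             TarArchiveInputStream tarIn = new TarArchiveInputStream(gzipIn)) {
            TarArchiveEntry entry;
            while ((entry = tarIn.getNextTarEntry()) != null) {
                Path target = Paths.get(destinationFolder, entry.getName()).normalize();
                if (entry.isDirectory()) {
                    Files.createDirectories(target);
                } else {
                    Files.createDirectories(target.getParent());
                    Files.copy(tarIn, target); // copies only the current tar entry's bytes
                }
            }
        }
    }
}
```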
@Test public void shouldUnpackTheTarGzFilesRecursivelyWithCompressedXMLFiles() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME2 + ".tar.gz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME2); assertNotNull(files); assertEquals(XML_FILES_COUNT,files.size()); }
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); static final String TAR; }
@Test public void shouldUnpackTheTGZFilesRecursivelyWithCompressedXMLFiles() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME2 + ".tgz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME2); assertNotNull(files); assertEquals(XML_FILES_COUNT,files.size()); }
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); static final String TAR; }
@Test public void shouldUnpackTheTarGzFilesRecursivelyWithMixedNestedCompressedFiles() throws CompressionExtensionNotRecognizedException, IOException { gzUnpackingService.unpackFile(DESTINATION_DIR + FILE_NAME3 + ".tar.gz", DESTINATION_DIR); Collection files = getXMLFiles(DESTINATION_DIR + FILE_NAME3); assertNotNull(files); assertEquals(XML_FILES_COUNT,files.size()); }
public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); }
GzUnpackingService implements FileUnpackingService { public void unpackFile(final String zipFile, final String destinationFolder) throws CompressionExtensionNotRecognizedException, IOException { String[] extensions = CompressionFileExtension.getExtensionValues(); unpackFile(zipFile, destinationFolder, extensions); } void unpackFile(final String zipFile, final String destinationFolder); static final String TAR; }
@Test public void shouldReturnZipService() throws CompressionExtensionNotRecognizedException { FileUnpackingService fileUnpackingService = UnpackingServiceFactory.createUnpackingService(ZIP_EXTENSION); assertTrue(fileUnpackingService instanceof ZipUnpackingService); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
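For completeness, a brief usage sketch of the factory shown above: the compression extension is derived from the file name (commons-io FilenameUtils, which the services already use) and dispatched to the matching service. Types are assumed to be in the same package; the path is hypothetical.

```java
import org.apache.commons.io.FilenameUtils;

import java.io.IOException;

// Usage sketch, assuming FileUnpackingService, UnpackingServiceFactory and
// CompressionExtensionNotRecognizedException are available in the same package.
public class FactoryUsageSketch {

    public static void unpack(String archivePath, String destinationFolder)
            throws IOException, CompressionExtensionNotRecognizedException {
        String extension = FilenameUtils.getExtension(archivePath); // e.g. "zip", "gz", "tgz"
        FileUnpackingService service = UnpackingServiceFactory.createUnpackingService(extension);
        service.unpackFile(archivePath, destinationFolder);
        // Unrecognized extensions surface as CompressionExtensionNotRecognizedException,
        // which is exactly what the negative test below exercises.
    }
}
```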
@Test public void shouldReturnGZipService() throws CompressionExtensionNotRecognizedException { FileUnpackingService fileUnpackingService = UnpackingServiceFactory.createUnpackingService(GZIP_EXTENSION); assertTrue(fileUnpackingService instanceof GzUnpackingService); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
@Test(expected = Exception.class) public void shouldRetry5TimesBeforeFailingWhileRemovingStatistics() { doThrow(Exception.class).when(cassandraNodeStatisticsDAO).removeStatistics(eq(TASK_ID)); removerImpl.removeStatistics(TASK_ID); verify(cassandraNodeStatisticsDAO, times(6)).removeStatistics((eq(TASK_ID))); }
@Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
RemoverImpl implements Remover { @Override public void removeStatistics(long taskId) { int retries = DEFAULT_RETRIES; while (true) { try { cassandraNodeStatisticsDAO.removeStatistics(taskId); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while removing the validation statistics. Retries left: " + retries); waitForTheNextCall(); } else { LOGGER.error("Error while removing the validation statistics."); throw e; } } } } RemoverImpl(String hosts, int port, String keyspaceName, String userName, String password); RemoverImpl(CassandraSubTaskInfoDAO subTaskInfoDAO, CassandraTaskErrorsDAO taskErrorDAO, CassandraNodeStatisticsDAO cassandraNodeStatisticsDAO); @Override void removeNotifications(long taskId); @Override void removeErrorReports(long taskId); @Override void removeStatistics(long taskId); }
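The removeStatistics rows above retry the DAO call before rethrowing; the test name and the times(6) verification imply one initial attempt plus DEFAULT_RETRIES = 5 retries. A generic, self-contained sketch of that retry pattern (constants and the sleep interval are illustrative, and the project's waitForTheNextCall() is not shown in this excerpt) could look like this:

```java
// Generic retry sketch; not the project's RemoverImpl code.
public class RetrySketch {

    private static final int DEFAULT_RETRIES = 5;              // matches the times(6) expectation: 1 call + 5 retries
    private static final long SLEEP_BETWEEN_RETRIES_MS = 1000; // illustrative pause, stands in for waitForTheNextCall()

    public static void runWithRetries(Runnable action) {
        int retries = DEFAULT_RETRIES;
        while (true) {
            try {
                action.run();
                return;
            } catch (RuntimeException e) {
                if (retries-- > 0) {
                    sleepQuietly(SLEEP_BETWEEN_RETRIES_MS);
                } else {
                    throw e; // give up after the last retry, mirroring the "throw e" branch above
                }
            }
        }
    }

    private static void sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
        }
    }
}
```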
@Test public void shouldReturnGZipServiceForTGZExtension() throws CompressionExtensionNotRecognizedException { FileUnpackingService fileUnpackingService = UnpackingServiceFactory.createUnpackingService(TGZIP_EXTENSION); assertTrue(fileUnpackingService instanceof GzUnpackingService); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
@Test(expected = CompressionExtensionNotRecognizedException.class) public void shouldThrowExceptionIfTheExtensionWasNotRecognized() throws CompressionExtensionNotRecognizedException { UnpackingServiceFactory.createUnpackingService(UNDEFINED_COMPRESSION_EXTENSION); }
public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
UnpackingServiceFactory { public static FileUnpackingService createUnpackingService(String compressingExtension) throws CompressionExtensionNotRecognizedException { if (compressingExtension.equals(CompressionFileExtension.ZIP.getExtension())) return ZIP_UNPACKING_SERVICE; else if (compressingExtension.equals(CompressionFileExtension.GZIP.getExtension()) || compressingExtension.equals(CompressionFileExtension.TGZIP.getExtension())) return GZ_UNPACKING_SERVICE; else throw new CompressionExtensionNotRecognizedException("This compression extension is not recognized " + compressingExtension); } static FileUnpackingService createUnpackingService(String compressingExtension); }
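The CompressionFileExtension enum referenced throughout these rows is not part of this excerpt. Judging only from the calls made above (getExtension(), getExtensionValues(), contains()), a compatible shape might look like the following sketch; constant names and extension strings are assumptions.

```java
// Sketch of a compatible enum; not the project's actual CompressionFileExtension.
public enum CompressionFileExtensionSketch {
    ZIP("zip"),
    GZIP("gz"),
    TGZIP("tgz");

    private final String extension;

    CompressionFileExtensionSketch(String extension) {
        this.extension = extension;
    }

    public String getExtension() {
        return extension;
    }

    public static String[] getExtensionValues() {
        CompressionFileExtensionSketch[] all = values();
        String[] result = new String[all.length];
        for (int i = 0; i < all.length; i++) {
            result[i] = all[i].getExtension();
        }
        return result;
    }

    public static boolean contains(String extension) {
        for (CompressionFileExtensionSketch candidate : values()) {
            if (candidate.getExtension().equals(extension)) {
                return true;
            }
        }
        return false;
    }
}
```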
@Test public void executeBolt() throws IOException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, readMockContentOfURL(sampleXmlFileName), prepareStormTaskTupleParameters(sampleXsltFileName), new Revision()); xsltBolt.execute(anchorTuple, tuple); when(outputCollector.emit(anyList())).thenReturn(null); verify(outputCollector, times(1)).emit(Mockito.any(Tuple.class), captor.capture()); assertThat(captor.getAllValues().size(), is(1)); List<Values> allValues = captor.getAllValues(); assertEmittedTuple(allValues, 4); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
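The XsltBolt tests above refer to xsltBolt, outputCollector and captor fields whose setup is outside this excerpt. A hypothetical Mockito-based scaffold showing how such fields could be created is sketched below; how the mocked collector is handed to the bolt (for example through the bolt's prepare call) is intentionally left out, because that wiring is not visible here.

```java
import org.apache.storm.task.OutputCollector;
import org.apache.storm.tuple.Values;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;

// Hypothetical test scaffolding; field names mirror the tests above, everything else is assumed.
public class XsltBoltTestScaffoldSketch {

    // Mocked Storm collector whose emit(...) calls the tests verify.
    private final OutputCollector outputCollector = Mockito.mock(OutputCollector.class);

    // Captures the Values emitted by the bolt, as in captor.capture() above.
    private final ArgumentCaptor<Values> captor = ArgumentCaptor.forClass(Values.class);

    // Bolt under test; assumes a no-argument constructor, as suggested by the class listings above.
    private final XsltBolt xsltBolt = new XsltBolt();
}
```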
@Test public void executeBoltWithInjection() throws IOException { Tuple anchorTuple = mock(TupleImpl.class); HashMap<String, String> parameters = prepareStormTaskTupleParameters(injectNodeXsltFileName); parameters.put(PluginParameterKeys.METIS_DATASET_ID, EXAMPLE_METIS_DATASET_ID); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, readMockContentOfURL(injectXmlFileName), parameters, new Revision()); xsltBolt.execute(anchorTuple, tuple); when(outputCollector.emit(anyList())).thenReturn(null); verify(outputCollector, times(1)).emit(Mockito.any(Tuple.class), captor.capture()); assertThat(captor.getAllValues().size(), is(1)); List<Values> allValues = captor.getAllValues(); assertEmittedTuple(allValues, 4); String transformed = new String((byte[]) allValues.get(0).get(3)); assertNotNull(transformed); assertTrue(transformed.contains(EXAMPLE_METIS_DATASET_ID)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
XsltBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { StringWriter writer = null; try { final String fileUrl = stormTaskTuple.getFileUrl(); final String xsltUrl = stormTaskTuple.getParameter(PluginParameterKeys.XSLT_URL); LOGGER.info("Processing file: {} with xslt schema:{}", fileUrl, xsltUrl); final XsltTransformer xsltTransformer = prepareXsltTransformer(stormTaskTuple); writer = xsltTransformer .transform(stormTaskTuple.getFileData(), prepareEuropeanaGeneratedIdsMap(stormTaskTuple)); LOGGER.info("XsltBolt: transformation success for: {}", fileUrl); stormTaskTuple.setFileData(writer.toString().getBytes(StandardCharsets.UTF_8)); final UrlParser urlParser = new UrlParser(fileUrl); if (urlParser.isUrlToRepresentationVersionFile()) { stormTaskTuple .addParameter(PluginParameterKeys.CLOUD_ID, urlParser.getPart(UrlPart.RECORDS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_NAME, urlParser.getPart(UrlPart.REPRESENTATIONS)); stormTaskTuple.addParameter(PluginParameterKeys.REPRESENTATION_VERSION, urlParser.getPart(UrlPart.VERSIONS)); } clearParametersStormTuple(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); outputCollector.ack(anchorTuple); } catch (Exception e) { LOGGER.error("XsltBolt error:{}", e.getMessage()); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), "", e.getMessage(), ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); outputCollector.ack(anchorTuple); } finally { if (writer != null) { try { writer.close(); } catch (IOException e) { LOGGER.error("Error: during closing the writer {}", e.getMessage()); } } } } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldIndexFileForPreviewEnv() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); String targetIndexingEnv = "PREVIEW"; StormTaskTuple tuple = mockStormTupleFor(targetIndexingEnv); mockIndexerFactoryFor(null); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(1)).emit(any(Tuple.class), captor.capture()); Values capturedValues = captor.getValue(); assertEquals(8, capturedValues.size()); assertEquals("sampleResourceUrl", capturedValues.get(2)); Map<String, String> parameters = (Map<String, String>) capturedValues.get(4); assertEquals(5, parameters.size()); DataSetCleanerParameters dataSetCleanerParameters = new Gson().fromJson(parameters.get(PluginParameterKeys.DATA_SET_CLEANING_PARAMETERS), DataSetCleanerParameters.class); assertFalse(dataSetCleanerParameters.isUsingAltEnv()); assertEquals(targetIndexingEnv, dataSetCleanerParameters.getTargetIndexingEnv()); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final String DATE_FORMAT; static final String PARSE_RECORD_DATE_ERROR_MESSAGE; static final String INDEXING_FILE_ERROR_MESSAGE; }
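The rows above all center on IndexingBolt.execute, whose first failure mode is an unparsable METIS_RECORD_DATE. The value of the DATE_FORMAT constant is not visible in these rows, so the pattern below is only an assumed ISO-8601-style placeholder; the catch block mirrors how a ParseException ends up on the error-notification path.

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

// Sketch of the date handling in the focal method. ASSUMED_DATE_FORMAT is an
// assumption for illustration; the real value comes from IndexingBolt.DATE_FORMAT.
public class RecordDateSketch {

    private static final String ASSUMED_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX";

    public static void main(String[] args) {
        DateFormat dateFormat = new SimpleDateFormat(ASSUMED_DATE_FORMAT, Locale.US);
        try {
            Date recordDate = dateFormat.parse("2021-06-14T10:15:30.000+02:00");
            System.out.println("parsed: " + recordDate);
        } catch (ParseException e) {
            // In the bolt this becomes logAndEmitError(..., PARSE_RECORD_DATE_ERROR_MESSAGE, ...).
            System.err.println("could not parse record date: " + e.getMessage());
        }
    }
}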
@Test public void shouldIndexFilePublishEnv() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); String targetIndexingEnv = "PUBLISH"; StormTaskTuple tuple = mockStormTupleFor(targetIndexingEnv); mockIndexerFactoryFor(null); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(1)).emit(any(Tuple.class), captor.capture()); Values capturedValues = captor.getValue(); assertEquals(8, capturedValues.size()); assertEquals("sampleResourceUrl", capturedValues.get(2)); Map<String, String> parameters = (Map<String, String>) capturedValues.get(4); assertEquals(5, parameters.size()); DataSetCleanerParameters dataSetCleanerParameters = new Gson().fromJson(parameters.get(PluginParameterKeys.DATA_SET_CLEANING_PARAMETERS), DataSetCleanerParameters.class); assertFalse(dataSetCleanerParameters.isUsingAltEnv()); assertEquals(targetIndexingEnv, dataSetCleanerParameters.getTargetIndexingEnv()); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final String DATE_FORMAT; static final String PARSE_RECORD_DATE_ERROR_MESSAGE; static final String INDEXING_FILE_ERROR_MESSAGE; }
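One small but easy-to-miss detail of the focal method is how DATASET_IDS_TO_REDIRECT_FROM becomes a list: a null parameter stays null, anything else is trimmed and split on commas with optional surrounding whitespace. The sketch below isolates just that expression; the sample ids are made up.

import java.util.Arrays;
import java.util.List;

// Same null-check-then-split expression as in IndexingBolt.execute.
public class RedirectIdsSketch {

    static List<String> toList(String datasetIdsToRedirectFrom) {
        return datasetIdsToRedirectFrom == null
                ? null
                : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*"));
    }

    public static void main(String[] args) {
        System.out.println(toList(" 2021006 , 2021009,2021012 ")); // [2021006, 2021009, 2021012]
        System.out.println(toList(null));                          // null
    }
}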
@Test public void shouldEmitErrorNotificationForIndexerConfiguration() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("PREVIEW"); mockIndexerFactoryFor(IndexerRelatedIndexingException.class); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(1)).emit(any(String.class), any(Tuple.class), captor.capture()); Values capturedValues = captor.getValue(); Map val = (Map) capturedValues.get(2); assertEquals("sampleResourceUrl", val.get("resource")); Assert.assertTrue(val.get("additionalInfo").toString().contains("Error while indexing")); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final String DATE_FORMAT; static final String PARSE_RECORD_DATE_ERROR_MESSAGE; static final String INDEXING_FILE_ERROR_MESSAGE; }
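The error-path tests in this section all follow the same Mockito pattern: verify the emit to the notification stream and capture the emitted Values for inspection. The self-contained sketch below shows that capture-and-inspect pattern with stand-in Values and Emitter types instead of Storm's classes; only the Mockito calls mirror the real tests.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.util.ArrayList;
import java.util.Arrays;
import org.mockito.ArgumentCaptor;

// Values mirrors Storm's Values (an ArrayList<Object>); Emitter mirrors the one
// OutputCollector method the tests verify. Both are illustrative stand-ins.
public class CaptorSketch {

    static class Values extends ArrayList<Object> {
        Values(Object... items) { super(Arrays.asList(items)); }
    }

    interface Emitter {
        void emit(String streamId, Object anchor, Values values);
    }

    public static void main(String[] args) {
        Emitter emitter = mock(Emitter.class);
        emitter.emit("NotificationStream", new Object(), new Values("sampleResourceUrl", "ERROR"));

        // Same capture-and-inspect step as captor.capture() in the tests above.
        ArgumentCaptor<Values> captor = ArgumentCaptor.forClass(Values.class);
        verify(emitter).emit(any(String.class), any(Object.class), captor.capture());
        System.out.println(captor.getValue()); // [sampleResourceUrl, ERROR]
    }
}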
@Test public void shouldEmitErrorNotificationForIndexing() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("PUBLISH"); mockIndexerFactoryFor(IndexerRelatedIndexingException.class); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(1)).emit(any(String.class), any(Tuple.class), captor.capture()); Values capturedValues = captor.getValue(); Map val = (Map) capturedValues.get(2); assertEquals("sampleResourceUrl", val.get("resource")); Assert.assertTrue(val.get("additionalInfo").toString().contains("Error while indexing")); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final String DATE_FORMAT; static final String PARSE_RECORD_DATE_ERROR_MESSAGE; static final String INDEXING_FILE_ERROR_MESSAGE; }
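The preview/publish tests read DATA_SET_CLEANING_PARAMETERS back out of the emitted parameters with Gson. The round trip below uses a stand-in POJO with only the two properties the assertions touch (isUsingAltEnv, getTargetIndexingEnv); the real DataSetCleanerParameters class may carry additional fields.

import com.google.gson.Gson;

// Minimal Gson round trip over a stand-in for DataSetCleanerParameters.
public class GsonRoundTripSketch {

    static class CleanerParams {
        private boolean usingAltEnv;
        private String targetIndexingEnv;

        CleanerParams(boolean usingAltEnv, String targetIndexingEnv) {
            this.usingAltEnv = usingAltEnv;
            this.targetIndexingEnv = targetIndexingEnv;
        }

        boolean isUsingAltEnv() { return usingAltEnv; }
        String getTargetIndexingEnv() { return targetIndexingEnv; }
    }

    public static void main(String[] args) {
        Gson gson = new Gson();
        String json = gson.toJson(new CleanerParams(false, "PUBLISH"));
        CleanerParams back = gson.fromJson(json, CleanerParams.class);
        System.out.println(back.isUsingAltEnv() + " " + back.getTargetIndexingEnv()); // false PUBLISH
    }
}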
@Test public void shouldThrowExceptionWhenDateIsUnParsable() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("PREVIEW"); tuple.getParameters().remove(PluginParameterKeys.METIS_RECORD_DATE); tuple.addParameter(PluginParameterKeys.METIS_RECORD_DATE, "UN_PARSABLE_DATE"); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(1)).emit(any(String.class), any(Tuple.class), captor.capture()); Values capturedValues = captor.getValue(); Map val = (Map) capturedValues.get(2); assertEquals("sampleResourceUrl", val.get("resource")); Assert.assertTrue(val.get("info_text").toString().contains("Could not parse RECORD_DATE parameter")); Assert.assertEquals("ERROR", val.get("state").toString()); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final String DATE_FORMAT; static final String PARSE_RECORD_DATE_ERROR_MESSAGE; static final String INDEXING_FILE_ERROR_MESSAGE; }
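One more detail of the focal method worth noting: METIS_PRESERVE_TIMESTAMPS and PERFORM_REDIRECTS are read with Boolean.parseBoolean, which never throws, so absent or malformed flags silently become false rather than taking the error path exercised by the tests. A quick check:

// Boolean.parseBoolean is lenient: only a case-insensitive "true" yields true.
public class ParseBooleanSketch {

    public static void main(String[] args) {
        System.out.println(Boolean.parseBoolean("true")); // true
        System.out.println(Boolean.parseBoolean("TRUE")); // true (case-insensitive)
        System.out.println(Boolean.parseBoolean(null));   // false
        System.out.println(Boolean.parseBoolean("yes"));  // false
    }
}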
@Test public void shouldThrowExceptionForUnknownEnv() throws IndexingException { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = mockStormTupleFor("UNKNOWN_ENVIRONMENT"); mockIndexerFactoryFor(RuntimeException.class); indexingBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(1)).emit(any(String.class), any(Tuple.class), captor.capture()); Values capturedValues = captor.getValue(); Map val = (Map) capturedValues.get(2); assertEquals("sampleResourceUrl", val.get("resource")); assertEquals("ERROR", val.get("state").toString()); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
IndexingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { final String useAltEnv = stormTaskTuple .getParameter(PluginParameterKeys.METIS_USE_ALT_INDEXING_ENV); final String datasetId = stormTaskTuple.getParameter(PluginParameterKeys.METIS_DATASET_ID); final String database = stormTaskTuple .getParameter(PluginParameterKeys.METIS_TARGET_INDEXING_DATABASE); final boolean preserveTimestampsString = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.METIS_PRESERVE_TIMESTAMPS)); final String datasetIdsToRedirectFrom = stormTaskTuple .getParameter(PluginParameterKeys.DATASET_IDS_TO_REDIRECT_FROM); final List<String> datasetIdsToRedirectFromList = datasetIdsToRedirectFrom == null ? null : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*")); final boolean performRedirects = Boolean .parseBoolean(stormTaskTuple.getParameter(PluginParameterKeys.PERFORM_REDIRECTS)); String dpsURL = indexingProperties.getProperty(PluginParameterKeys.DPS_URL); DateFormat dateFormat = new SimpleDateFormat(DATE_FORMAT, Locale.US); final Date recordDate; try { final IndexerPool indexerPool = indexerPoolWrapper.getIndexerPool(useAltEnv, database); recordDate = dateFormat .parse(stormTaskTuple.getParameter(PluginParameterKeys.METIS_RECORD_DATE)); final String document = new String(stormTaskTuple.getFileData()); indexerPool .index(document, recordDate, preserveTimestampsString, datasetIdsToRedirectFromList, performRedirects); prepareTuple(stormTaskTuple, useAltEnv, datasetId, database, recordDate, dpsURL); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info( "Indexing bolt executed for: {} (alternative environment: {}, record date: {}, preserve timestamps: {}).", database, useAltEnv, recordDate, preserveTimestampsString); } catch (RuntimeException e) { logAndEmitError(anchorTuple, e, e.getMessage(), stormTaskTuple); } catch (ParseException e) { logAndEmitError(anchorTuple, e, PARSE_RECORD_DATE_ERROR_MESSAGE, stormTaskTuple); } catch (IndexingException e) { logAndEmitError(anchorTuple, e, INDEXING_FILE_ERROR_MESSAGE, stormTaskTuple); } outputCollector.ack(anchorTuple); } IndexingBolt(Properties indexingProperties); @Override void prepare(); @Override void cleanup(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); static final String DATE_FORMAT; static final String PARSE_RECORD_DATE_ERROR_MESSAGE; static final String INDEXING_FILE_ERROR_MESSAGE; }
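As a side note, the bolt above turns the comma-separated DATASET_IDS_TO_REDIRECT_FROM parameter into a list with trim().split("\\s*,\\s*"). Below is a minimal, self-contained sketch of just that parsing step; the parameter value and the standalone class name are invented for illustration and are not part of the eCloud code.

import java.util.Arrays;
import java.util.List;

// Illustrative only: mirrors the split logic used in IndexingBolt.execute.
public class RedirectIdsParsingSketch {
    public static void main(String[] args) {
        String datasetIdsToRedirectFrom = " 100 , 200,300 "; // hypothetical parameter value
        List<String> ids = datasetIdsToRedirectFrom == null
                ? null
                : Arrays.asList(datasetIdsToRedirectFrom.trim().split("\\s*,\\s*"));
        System.out.println(ids); // prints [100, 200, 300]
    }
}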
@Test public void shouldInvokeAllTheRemovalStepsIncludingErrorReports() { removerInvoker.executeInvokerForSingleTask(TASK_ID, true); verify(remover, times(1)).removeNotifications((eq(TASK_ID))); verify(remover, times(1)).removeStatistics((eq(TASK_ID))); verify(remover, times(1)).removeErrorReports((eq(TASK_ID))); }
public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for task Id:" + taskId + " started. This step could take time depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for task Id:" + taskId + " started. This step could take time depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for task Id:" + taskId + " started. This step could take time depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } RemoverInvoker(Remover remover); }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for task Id:" + taskId + " started. This step could take time depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } RemoverInvoker(Remover remover); void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors); void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors); }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for task Id:" + taskId + " started. This step could take time depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } RemoverInvoker(Remover remover); void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors); void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors); }
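A natural companion to the test above would cover the negative path, checking that error reports are left untouched when the flag is false. The sketch below is only an illustration: it builds its own Mockito mock of Remover (whose methods are taken from the calls shown above) and uses a hypothetical task id instead of the class-level fixture.

@Test
public void shouldSkipErrorReportRemovalWhenFlagIsFalse() {
    // Sketch only: constructs its own mock and invoker rather than reusing the test fixture; 1234L is a made-up task id.
    Remover remover = Mockito.mock(Remover.class);
    RemoverInvoker removerInvoker = new RemoverInvoker(remover);
    removerInvoker.executeInvokerForSingleTask(1234L, false);
    Mockito.verify(remover, Mockito.times(1)).removeNotifications(1234L);
    Mockito.verify(remover, Mockito.times(1)).removeStatistics(1234L);
    Mockito.verify(remover, Mockito.never()).removeErrorReports(1234L);
}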
@Test public void testImportTopic() throws Exception { List<String> topics = setupTopic(zkClient, TEST_TOPIC_NAME); AtlasEntity.AtlasEntityWithExtInfo atlasEntityWithExtInfo = new AtlasEntity.AtlasEntityWithExtInfo( getTopicEntityWithGuid("0dd466a4-3838-4537-8969-6abb8b9e9185")); KafkaBridge kafkaBridge = mock(KafkaBridge.class); when(kafkaBridge.createEntityInAtlas(atlasEntityWithExtInfo)).thenReturn(atlasEntityWithExtInfo); try { kafkaBridge.importTopic(TEST_TOPIC_NAME); } catch (Exception e) { Assert.fail("KafkaBridge import failed ", e); } }
public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add(topic); } } topics = topics_subset; } if (CollectionUtils.isNotEmpty(topics)) { for(String topic : topics) { createOrUpdateTopic(topic); } } }
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add(topic); } } topics = topics_subset; } if (CollectionUtils.isNotEmpty(topics)) { for(String topic : topics) { createOrUpdateTopic(topic); } } } }
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add(topic); } } topics = topics_subset; } if (CollectionUtils.isNotEmpty(topics)) { for(String topic : topics) { createOrUpdateTopic(topic); } } } KafkaBridge(Configuration atlasConf, AtlasClientV2 atlasClientV2); }
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add(topic); } } topics = topics_subset; } if (CollectionUtils.isNotEmpty(topics)) { for(String topic : topics) { createOrUpdateTopic(topic); } } } KafkaBridge(Configuration atlasConf, AtlasClientV2 atlasClientV2); static void main(String[] args); void importTopic(String topicToImport); }
KafkaBridge { public void importTopic(String topicToImport) throws Exception { List<String> topics = availableTopics; if (StringUtils.isNotEmpty(topicToImport)) { List<String> topics_subset = new ArrayList<>(); for(String topic : topics) { if (Pattern.compile(topicToImport).matcher(topic).matches()) { topics_subset.add(topic); } } topics = topics_subset; } if (CollectionUtils.isNotEmpty(topics)) { for(String topic : topics) { createOrUpdateTopic(topic); } } } KafkaBridge(Configuration atlasConf, AtlasClientV2 atlasClientV2); static void main(String[] args); void importTopic(String topicToImport); }
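importTopic treats its argument as a regular expression and keeps only the topics whose full name matches it. The following standalone sketch reproduces just that filtering step; the topic names and the class name are invented for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

// Illustrative only: reproduces the Pattern.compile(...).matcher(...).matches() filter from KafkaBridge.importTopic.
public class TopicFilterSketch {
    public static void main(String[] args) {
        List<String> availableTopics = Arrays.asList("orders", "orders-dlq", "payments"); // hypothetical topic list
        String topicToImport = "orders.*"; // interpreted as a regex, as in importTopic

        List<String> selected = new ArrayList<>();
        for (String topic : availableTopics) {
            if (Pattern.compile(topicToImport).matcher(topic).matches()) {
                selected.add(topic);
            }
        }
        System.out.println(selected); // prints [orders, orders-dlq]
    }
}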
@Test public void ALLEntityType() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(SearchParameters.ALL_ENTITY_TYPES); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchProcessor(context); assertEquals(processor.execute().size(), 20); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
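The execute() loop above keeps fetching pages of size limit, post-filters each page in memory, and stops once it has collected limit results or a page comes back short. Below is a simplified, self-contained illustration of that control flow; the page source and the filter are stand-ins, not Atlas classes.

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

// Illustrative paging sketch: fetch pages of `limit`, filter them, stop on a short page or when full.
public class PagingSketch {
    // Stand-in for the graph/index query: returns ids [offset, offset + limit) up to a fixed total.
    static List<Integer> fetchPage(int offset, int limit, int total) {
        return IntStream.range(offset, Math.min(offset + limit, total))
                .boxed()
                .collect(Collectors.toCollection(ArrayList::new));
    }

    public static void main(String[] args) {
        int limit = 5;
        int total = 23;                              // hypothetical number of stored vertices
        Predicate<Integer> keep = i -> i % 2 == 0;   // stand-in for the in-memory predicate

        List<Integer> ret = new ArrayList<>();
        for (int qryOffset = 0; ret.size() < limit; qryOffset += limit) {
            List<Integer> page = fetchPage(qryOffset, limit, total);
            boolean isLastResultPage = page.size() < limit; // short page means the source is exhausted
            page.removeIf(keep.negate());                   // keep only entries passing the filter
            for (Integer v : page) {
                if (ret.size() < limit) {
                    ret.add(v);
                }
            }
            if (isLastResultPage) {
                break;
            }
        }
        System.out.println(ret); // prints [0, 2, 4, 6, 8]
    }
}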
@Test public void ALLEntityTypeWithTag() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(SearchParameters.ALL_ENTITY_TYPES); params.setClassification(FACT_CLASSIFICATION); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchProcessor(context); assertEquals(processor.execute().size(), 5); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
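Both branches of execute() post-filter the fetched vertices in place with Apache Commons' CollectionUtils.filter. A small usage sketch of that call on plain strings follows; the predicate is invented for illustration, and commons-collections4 is assumed here (Atlas itself may depend on an older commons-collections version with an analogous API).

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.Predicate;

// Usage sketch for the in-place filtering style used in EntitySearchProcessor.execute.
public class InMemoryFilterSketch {
    public static void main(String[] args) {
        List<String> vertices = new ArrayList<>(Arrays.asList("hive_db", "hive_table", "kafka_topic"));
        Predicate<String> startsWithHive = new Predicate<String>() {
            @Override
            public boolean evaluate(String value) {
                return value.startsWith("hive_"); // hypothetical filter condition
            }
        };
        CollectionUtils.filter(vertices, startsWithHive); // mutates the list, keeping matches only
        System.out.println(vertices); // prints [hive_db, hive_table]
    }
}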
@Test public void entityType() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchProcessor(context); assertEquals(processor.execute().size(), 3); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
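One detail worth noting in execute(): when a downstream processor or extra predicate will filter the results, qryOffset starts at 0 and the user-supplied offset (startIdx) is applied only when collecting results. The apparent reason is that an offset has to be applied to the filtered stream, not to the raw query. The sketch below illustrates that reasoning with plain Java streams; it is not Atlas code, and the numbers are made up.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

// Why the offset should be applied after post-filtering rather than pushed into the raw query.
public class OffsetAfterFilteringSketch {
    public static void main(String[] args) {
        List<Integer> raw = IntStream.rangeClosed(1, 10).boxed().collect(Collectors.toList());
        int offset = 2, limit = 2;

        // Correct: filter first (keep evens), then skip `offset` and take `limit`.
        List<Integer> correct = raw.stream()
                .filter(i -> i % 2 == 0)
                .skip(offset).limit(limit)
                .collect(Collectors.toList());
        System.out.println(correct); // prints [6, 8]

        // Incorrect: skipping before filtering drops the wrong rows.
        List<Integer> wrong = raw.stream()
                .skip(offset)
                .filter(i -> i % 2 == 0)
                .limit(limit)
                .collect(Collectors.toList());
        System.out.println(wrong); // prints [4, 6]
    }
}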
@Test public void entityTypes() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchProcessor(context); assertEquals(processor.execute().size(), 14); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
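Within each page, execute() narrows the candidate list in place with commons-collections CollectionUtils.filter: first the in-memory predicate, then, when present, the graph-query predicate. A small hedged illustration of that chained, in-place filtering using the commons-collections 3.x API; the string candidates and the two predicates are illustrative, not the predicates Atlas actually builds.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;

// Sketch: apply two predicates in sequence, mutating the candidate list in place,
// mirroring how execute() applies inMemoryPredicate and then graphQueryPredicate.
public class ChainedFilterSketch {
    public static void main(String[] args) {
        List<String> candidates = new ArrayList<>(Arrays.asList("hive_table_1", "hive_db_1", "hive_table_2"));

        Predicate typePredicate  = o -> ((String) o).startsWith("hive_table");  // stand-in for inMemoryPredicate
        Predicate ownerPredicate = o -> ((String) o).endsWith("_1");            // stand-in for graphQueryPredicate

        CollectionUtils.filter(candidates, typePredicate);   // removes non-matching entries in place
        CollectionUtils.filter(candidates, ownerPredicate);  // execute() skips this step when graphQueryPredicate is null

        System.out.println(candidates);                      // prints [hive_table_1]
    }
}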
@Test public void entityTypesAndTag() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); params.setClassification(FACT_CLASSIFICATION); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchProcessor(context); assertEquals(processor.execute().size(), 3); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
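The tests above only vary the limit; the same execute() contract also honours offset, skipping the first offset matching vertices before collecting up to limit results. A hedged TestNG-style sketch of how paging could be exercised against the same fixtures is shown below; the expected size of 11 is an arithmetic assumption (14 total matches for these two types, minus an offset of 3), not a result taken from this test class.

// Hypothetical paging check, in the style of the tests above; the expected count is an assumption.
@Test
public void entityTypesWithOffsetAndLimit() throws AtlasBaseException {
    SearchParameters params = new SearchParameters();
    params.setTypeName(DATABASE_TYPE + "," + HIVE_TABLE_TYPE);
    params.setOffset(3);     // skip the first 3 matching vertices
    params.setLimit(20);     // then return at most 20

    SearchContext         context   = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet());
    EntitySearchProcessor processor = new EntitySearchProcessor(context);

    // entityTypes above reports 14 matches in total, so an offset of 3 should leave 11.
    assertEquals(processor.execute().size(), 11);
}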
@Test public void searchWithEntityTypesAndEntityFilters() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("owner", SearchParameters.Operator.CONTAINS, "ETL"); params.setEntityFilters(filterCriteria); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchProcessor(context); assertEquals(processor.execute().size(), 4); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
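getSingleFilterCondition is a helper from the test base class and is not shown here. A single-attribute criterion such as the "owner CONTAINS ETL" filter above can be built directly on SearchParameters.FilterCriteria; the sketch below shows roughly what such a helper might return. The setter names follow the Atlas SearchParameters model, but treat them as assumptions if your version differs.

// Hedged sketch of a getSingleFilterCondition-style helper: a single-attribute
// FilterCriteria wrapped in an AND group, ready for SearchParameters.setEntityFilters().
private SearchParameters.FilterCriteria singleFilter(String attrName, SearchParameters.Operator op, String value) {
    SearchParameters.FilterCriteria leaf = new SearchParameters.FilterCriteria();
    leaf.setAttributeName(attrName);
    leaf.setOperator(op);
    leaf.setAttributeValue(value);

    SearchParameters.FilterCriteria group = new SearchParameters.FilterCriteria();
    group.setCondition(SearchParameters.FilterCriteria.Condition.AND);
    group.setCriterion(Collections.singletonList(leaf));
    return group;
}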
@Test public void searchWithEntityTypesAndEntityFiltersAndTag() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(DATABASE_TYPE+","+HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("owner", SearchParameters.Operator.CONTAINS, "ETL"); params.setEntityFilters(filterCriteria); params.setClassification(LOGDATA_CLASSIFICATION); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, Collections.<String>emptySet()); EntitySearchProcessor processor = new EntitySearchProcessor(context); assertEquals(processor.execute().size(), 2); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
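Before paging, execute() resolves the requested sortBy against the first entity type: an unknown attribute name degrades to no sorting rather than failing the search, a known attribute is translated to its vertex property name, and a missing sortOrder defaults to ASCENDING. A compact restatement of the attribute-resolution step, extracted for clarity; the helper name and the explicit null guard are ours, not an Atlas method.

// Hedged helper extracting the sort-resolution step of execute(): returns the vertex
// property to sort on, or null to skip sorting when the attribute is unknown.
private static String resolveSortProperty(AtlasEntityType entityType, String sortBy) {
    if (sortBy == null) {
        return null;                       // nothing requested: leave results unsorted
    }
    AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy);
    // Unknown attribute: fall back to unsorted results instead of failing the search.
    return (sortByAttribute == null) ? null : sortByAttribute.getVertexPropertyName();
}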
@Test public void searchWithNotContains_stringAttr() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("tableType", SearchParameters.Operator.NOT_CONTAINS, "Managed"); params.setEntityFilters(filterCriteria); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, indexer.getVertexIndexKeys()); EntitySearchProcessor processor = new EntitySearchProcessor(context); List<AtlasVertex> vertices = processor.execute(); assertEquals(vertices.size(), 3); List<String> nameList = new ArrayList<>(); for (AtlasVertex vertex : vertices) { nameList.add((String) entityRetriever.toAtlasEntityHeader(vertex, Collections.singleton("name")).getAttribute("name")); } assertTrue(nameList.contains(expectedEntityName)); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
@Test public void searchWithNotContains_pipeSeperatedAttr() throws AtlasBaseException { SearchParameters params = new SearchParameters(); params.setTypeName(HIVE_TABLE_TYPE); SearchParameters.FilterCriteria filterCriteria = getSingleFilterCondition("__classificationNames", SearchParameters.Operator.NOT_CONTAINS, METRIC_CLASSIFICATION); params.setEntityFilters(filterCriteria); params.setLimit(20); SearchContext context = new SearchContext(params, typeRegistry, graph, indexer.getVertexIndexKeys()); EntitySearchProcessor processor = new EntitySearchProcessor(context); List<AtlasVertex> vertices = processor.execute(); assertEquals(vertices.size(), 7); List<String> nameList = new ArrayList<>(); for (AtlasVertex vertex : vertices) { nameList.add((String) entityRetriever.toAtlasEntityHeader(vertex, Collections.singleton("name")).getAttribute("name")); } assertTrue(nameList.contains(expectedEntityName)); }
@Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
EntitySearchProcessor extends SearchProcessor { @Override public List<AtlasVertex> execute() { if (LOG.isDebugEnabled()) { LOG.debug("==> EntitySearchProcessor.execute({})", context); } List<AtlasVertex> ret = new ArrayList<>(); AtlasPerfTracer perf = null; if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) { perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntitySearchProcessor.execute(" + context + ")"); } try { final int startIdx = context.getSearchParameters().getOffset(); final int limit = context.getSearchParameters().getLimit(); int qryOffset = (nextProcessor != null || (graphQuery != null && indexQuery != null)) ? 0 : startIdx; int resultIdx = qryOffset; final List<AtlasVertex> entityVertices = new ArrayList<>(); SortOrder sortOrder = context.getSearchParameters().getSortOrder(); String sortBy = context.getSearchParameters().getSortBy(); final AtlasEntityType entityType = context.getEntityTypes().iterator().next(); AtlasAttribute sortByAttribute = entityType.getAttribute(sortBy); if (sortByAttribute == null) { sortBy = null; } else { sortBy = sortByAttribute.getVertexPropertyName(); } if (sortOrder == null) { sortOrder = ASCENDING; } for (; ret.size() < limit; qryOffset += limit) { entityVertices.clear(); if (context.terminateSearch()) { LOG.warn("query terminated: {}", context.getSearchParameters()); break; } final boolean isLastResultPage; if (indexQuery != null) { Iterator<AtlasIndexQuery.Result> idxQueryResult = executeIndexQuery(context, indexQuery, qryOffset, limit); getVerticesFromIndexQueryResult(idxQueryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } else { Iterator<AtlasVertex> queryResult = graphQuery.vertices(qryOffset, limit).iterator(); getVertices(queryResult, entityVertices); isLastResultPage = entityVertices.size() < limit; CollectionUtils.filter(entityVertices, inMemoryPredicate); if (graphQueryPredicate != null) { CollectionUtils.filter(entityVertices, graphQueryPredicate); } } super.filter(entityVertices); resultIdx = collectResultVertices(ret, startIdx, limit, resultIdx, entityVertices); if (isLastResultPage) { break; } } } finally { AtlasPerfTracer.log(perf); } if (LOG.isDebugEnabled()) { LOG.debug("<== EntitySearchProcessor.execute({}): ret.size()={}", context, ret.size()); } return ret; } EntitySearchProcessor(SearchContext context); @Override List<AtlasVertex> execute(); @Override void filter(List<AtlasVertex> entityVertices); @Override long getResultCount(); }
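A minimal usage sketch for the EntitySearchProcessor.execute() rows above, mirroring how the searchWithNotContains_pipeSeperatedAttr test drives it: the typeRegistry, graph and vertex-index-key set are assumed to come from an already initialised Atlas repository, "hive_table" is only an illustrative type name, and the package imports are assumptions.

import java.util.List;
import java.util.Set;
import org.apache.atlas.discovery.EntitySearchProcessor; // package paths assumed
import org.apache.atlas.discovery.SearchContext;
import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.type.AtlasTypeRegistry;

public class EntitySearchProcessorSketch {
    // Builds the same SearchContext the test builds, then lets execute() page through results.
    static List<AtlasVertex> searchByType(AtlasTypeRegistry typeRegistry, AtlasGraph graph,
                                          Set<String> vertexIndexKeys) throws Exception {
        SearchParameters params = new SearchParameters();
        params.setTypeName("hive_table"); // illustrative type name
        params.setLimit(20);              // execute() keeps fetching pages until 'limit' vertices pass the filters

        SearchContext context = new SearchContext(params, typeRegistry, graph, vertexIndexKeys);

        // execute() chooses the index query or the graph query, applies the in-memory and
        // graph-query predicates, and stops at the last result page or when the limit is reached.
        return new EntitySearchProcessor(context).execute();
    }
}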
@Test public void testGetMetrics() { AtlasMetrics metrics = metricsService.getMetrics(); assertNotNull(metrics); assertEquals(metrics.getNumericMetric(GENERAL, METRIC_ENTITY_COUNT).intValue(), 43); assertEquals(metrics.getNumericMetric(GENERAL, METRIC_TAG_COUNT).intValue(), 1); assertTrue(metrics.getNumericMetric(GENERAL, METRIC_TYPE_UNUSED_COUNT).intValue() >= 10); assertTrue(metrics.getNumericMetric(GENERAL, METRIC_TYPE_COUNT).intValue() >= 44); Map tagMetricsActual = (Map) metrics.getMetric(TAG, METRIC_ENTITIES_PER_TAG); Map activeEntityMetricsActual = (Map) metrics.getMetric(ENTITY, METRIC_ENTITY_ACTIVE); Map deletedEntityMetricsActual = (Map) metrics.getMetric(ENTITY, METRIC_ENTITY_DELETED); assertEquals(tagMetricsActual.size(), 1); assertEquals(activeEntityMetricsActual.size(), 8); assertEquals(deletedEntityMetricsActual.size(), 4); assertEquals(tagMetricsActual, tagMetricsExpected); assertEquals(activeEntityMetricsActual, activeEntityMetricsExpected); assertEquals(deletedEntityMetricsActual, deletedEntityMetricsExpected); }
@SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long> activeEntityCount = new HashMap<>(); Map<String, Long> deletedEntityCount = new HashMap<>(); Map<String, Long> shellEntityCount = new HashMap<>(); Map<String, Long> taggedEntityCount = new HashMap<>(); Map<String, Long> activeEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> deletedEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> shellEntityCountTypeAndSubTypes = new HashMap<>(); long unusedTypeCount = 0; long totalEntities = 0; if (entityDefs != null) { for (AtlasEntityDef entityDef : entityDefs) { long activeCount = getTypeCount(entityDef.getName(), ACTIVE); long deletedCount = getTypeCount(entityDef.getName(), DELETED); long shellCount = getTypeShellCount(entityDef.getName()); if (activeCount > 0) { activeEntityCount.put(entityDef.getName(), activeCount); totalEntities += activeCount; } if (deletedCount > 0) { deletedEntityCount.put(entityDef.getName(), deletedCount); totalEntities += deletedCount; } if (activeCount == 0 && deletedCount == 0) { unusedTypeCount++; } if (shellCount > 0) { shellEntityCount.put(entityDef.getName(), shellCount); } } for (AtlasEntityDef entityDef : entityDefs) { AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entityDef.getName()); long entityActiveCount = 0; long entityDeletedCount = 0; long entityShellCount = 0; for (String type : entityType.getTypeAndAllSubTypes()) { entityActiveCount += activeEntityCount.get(type) == null ? 0 : activeEntityCount.get(type); entityDeletedCount += deletedEntityCount.get(type) == null ? 0 : deletedEntityCount.get(type); entityShellCount += shellEntityCount.get(type) == null ? 
0 : shellEntityCount.get(type); } if (entityActiveCount > 0) { activeEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityActiveCount); } if (entityDeletedCount > 0) { deletedEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityDeletedCount); } if (entityShellCount > 0) { shellEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityShellCount); } } } if (classificationDefs != null) { for (AtlasClassificationDef classificationDef : classificationDefs) { long count = getTypeCount(classificationDef.getName(), ACTIVE); if (count > 0) { taggedEntityCount.put(classificationDef.getName(), count); } } } AtlasMetrics metrics = new AtlasMetrics(); metrics.addMetric(GENERAL, METRIC_COLLECTION_TIME, System.currentTimeMillis()); metrics.addMetric(GENERAL, METRIC_STATS, metricsUtil.getStats()); metrics.addMetric(GENERAL, METRIC_TYPE_COUNT, getAllTypesCount()); metrics.addMetric(GENERAL, METRIC_TAG_COUNT, getAllTagsCount()); metrics.addMetric(GENERAL, METRIC_TYPE_UNUSED_COUNT, unusedTypeCount); metrics.addMetric(GENERAL, METRIC_ENTITY_COUNT, totalEntities); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE, activeEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED, deletedEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL, shellEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE_INCL_SUBTYPES, activeEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED_INCL_SUBTYPES, deletedEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL_INCL_SUBTYPES, shellEntityCountTypeAndSubTypes); metrics.addMetric(TAG, METRIC_ENTITIES_PER_TAG, taggedEntityCount); metrics.addMetric(SYSTEM, METRIC_MEMORY, AtlasMetricJVMUtil.getMemoryDetails()); metrics.addMetric(SYSTEM, METRIC_OS, AtlasMetricJVMUtil.getSystemInfo()); metrics.addMetric(SYSTEM, METRIC_RUNTIME, AtlasMetricJVMUtil.getRuntimeInfo()); return metrics; }
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long> activeEntityCount = new HashMap<>(); Map<String, Long> deletedEntityCount = new HashMap<>(); Map<String, Long> shellEntityCount = new HashMap<>(); Map<String, Long> taggedEntityCount = new HashMap<>(); Map<String, Long> activeEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> deletedEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> shellEntityCountTypeAndSubTypes = new HashMap<>(); long unusedTypeCount = 0; long totalEntities = 0; if (entityDefs != null) { for (AtlasEntityDef entityDef : entityDefs) { long activeCount = getTypeCount(entityDef.getName(), ACTIVE); long deletedCount = getTypeCount(entityDef.getName(), DELETED); long shellCount = getTypeShellCount(entityDef.getName()); if (activeCount > 0) { activeEntityCount.put(entityDef.getName(), activeCount); totalEntities += activeCount; } if (deletedCount > 0) { deletedEntityCount.put(entityDef.getName(), deletedCount); totalEntities += deletedCount; } if (activeCount == 0 && deletedCount == 0) { unusedTypeCount++; } if (shellCount > 0) { shellEntityCount.put(entityDef.getName(), shellCount); } } for (AtlasEntityDef entityDef : entityDefs) { AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entityDef.getName()); long entityActiveCount = 0; long entityDeletedCount = 0; long entityShellCount = 0; for (String type : entityType.getTypeAndAllSubTypes()) { entityActiveCount += activeEntityCount.get(type) == null ? 0 : activeEntityCount.get(type); entityDeletedCount += deletedEntityCount.get(type) == null ? 0 : deletedEntityCount.get(type); entityShellCount += shellEntityCount.get(type) == null ? 0 : shellEntityCount.get(type); } if (entityActiveCount > 0) { activeEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityActiveCount); } if (entityDeletedCount > 0) { deletedEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityDeletedCount); } if (entityShellCount > 0) { shellEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityShellCount); } } } if (classificationDefs != null) { for (AtlasClassificationDef classificationDef : classificationDefs) { long count = getTypeCount(classificationDef.getName(), ACTIVE); if (count > 0) { taggedEntityCount.put(classificationDef.getName(), count); } } } AtlasMetrics metrics = new AtlasMetrics(); metrics.addMetric(GENERAL, METRIC_COLLECTION_TIME, System.currentTimeMillis()); metrics.addMetric(GENERAL, METRIC_STATS, metricsUtil.getStats()); metrics.addMetric(GENERAL, METRIC_TYPE_COUNT, getAllTypesCount()); metrics.addMetric(GENERAL, METRIC_TAG_COUNT, getAllTagsCount()); metrics.addMetric(GENERAL, METRIC_TYPE_UNUSED_COUNT, unusedTypeCount); metrics.addMetric(GENERAL, METRIC_ENTITY_COUNT, totalEntities); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE, activeEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED, deletedEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL, shellEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE_INCL_SUBTYPES, activeEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED_INCL_SUBTYPES, deletedEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL_INCL_SUBTYPES, shellEntityCountTypeAndSubTypes); metrics.addMetric(TAG, METRIC_ENTITIES_PER_TAG, taggedEntityCount); metrics.addMetric(SYSTEM, METRIC_MEMORY, AtlasMetricJVMUtil.getMemoryDetails()); metrics.addMetric(SYSTEM, METRIC_OS, AtlasMetricJVMUtil.getSystemInfo()); metrics.addMetric(SYSTEM, METRIC_RUNTIME, AtlasMetricJVMUtil.getRuntimeInfo()); return metrics; } }
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long> activeEntityCount = new HashMap<>(); Map<String, Long> deletedEntityCount = new HashMap<>(); Map<String, Long> shellEntityCount = new HashMap<>(); Map<String, Long> taggedEntityCount = new HashMap<>(); Map<String, Long> activeEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> deletedEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> shellEntityCountTypeAndSubTypes = new HashMap<>(); long unusedTypeCount = 0; long totalEntities = 0; if (entityDefs != null) { for (AtlasEntityDef entityDef : entityDefs) { long activeCount = getTypeCount(entityDef.getName(), ACTIVE); long deletedCount = getTypeCount(entityDef.getName(), DELETED); long shellCount = getTypeShellCount(entityDef.getName()); if (activeCount > 0) { activeEntityCount.put(entityDef.getName(), activeCount); totalEntities += activeCount; } if (deletedCount > 0) { deletedEntityCount.put(entityDef.getName(), deletedCount); totalEntities += deletedCount; } if (activeCount == 0 && deletedCount == 0) { unusedTypeCount++; } if (shellCount > 0) { shellEntityCount.put(entityDef.getName(), shellCount); } } for (AtlasEntityDef entityDef : entityDefs) { AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entityDef.getName()); long entityActiveCount = 0; long entityDeletedCount = 0; long entityShellCount = 0; for (String type : entityType.getTypeAndAllSubTypes()) { entityActiveCount += activeEntityCount.get(type) == null ? 0 : activeEntityCount.get(type); entityDeletedCount += deletedEntityCount.get(type) == null ? 0 : deletedEntityCount.get(type); entityShellCount += shellEntityCount.get(type) == null ? 0 : shellEntityCount.get(type); } if (entityActiveCount > 0) { activeEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityActiveCount); } if (entityDeletedCount > 0) { deletedEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityDeletedCount); } if (entityShellCount > 0) { shellEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityShellCount); } } } if (classificationDefs != null) { for (AtlasClassificationDef classificationDef : classificationDefs) { long count = getTypeCount(classificationDef.getName(), ACTIVE); if (count > 0) { taggedEntityCount.put(classificationDef.getName(), count); } } } AtlasMetrics metrics = new AtlasMetrics(); metrics.addMetric(GENERAL, METRIC_COLLECTION_TIME, System.currentTimeMillis()); metrics.addMetric(GENERAL, METRIC_STATS, metricsUtil.getStats()); metrics.addMetric(GENERAL, METRIC_TYPE_COUNT, getAllTypesCount()); metrics.addMetric(GENERAL, METRIC_TAG_COUNT, getAllTagsCount()); metrics.addMetric(GENERAL, METRIC_TYPE_UNUSED_COUNT, unusedTypeCount); metrics.addMetric(GENERAL, METRIC_ENTITY_COUNT, totalEntities); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE, activeEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED, deletedEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL, shellEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE_INCL_SUBTYPES, activeEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED_INCL_SUBTYPES, deletedEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL_INCL_SUBTYPES, shellEntityCountTypeAndSubTypes); metrics.addMetric(TAG, METRIC_ENTITIES_PER_TAG, taggedEntityCount); metrics.addMetric(SYSTEM, METRIC_MEMORY, AtlasMetricJVMUtil.getMemoryDetails()); metrics.addMetric(SYSTEM, METRIC_OS, AtlasMetricJVMUtil.getSystemInfo()); metrics.addMetric(SYSTEM, METRIC_RUNTIME, AtlasMetricJVMUtil.getRuntimeInfo()); return metrics; } @Inject MetricsService(final AtlasGraph graph, final AtlasTypeRegistry typeRegistry, AtlasMetricsUtil metricsUtil); }
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long> activeEntityCount = new HashMap<>(); Map<String, Long> deletedEntityCount = new HashMap<>(); Map<String, Long> shellEntityCount = new HashMap<>(); Map<String, Long> taggedEntityCount = new HashMap<>(); Map<String, Long> activeEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> deletedEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> shellEntityCountTypeAndSubTypes = new HashMap<>(); long unusedTypeCount = 0; long totalEntities = 0; if (entityDefs != null) { for (AtlasEntityDef entityDef : entityDefs) { long activeCount = getTypeCount(entityDef.getName(), ACTIVE); long deletedCount = getTypeCount(entityDef.getName(), DELETED); long shellCount = getTypeShellCount(entityDef.getName()); if (activeCount > 0) { activeEntityCount.put(entityDef.getName(), activeCount); totalEntities += activeCount; } if (deletedCount > 0) { deletedEntityCount.put(entityDef.getName(), deletedCount); totalEntities += deletedCount; } if (activeCount == 0 && deletedCount == 0) { unusedTypeCount++; } if (shellCount > 0) { shellEntityCount.put(entityDef.getName(), shellCount); } } for (AtlasEntityDef entityDef : entityDefs) { AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entityDef.getName()); long entityActiveCount = 0; long entityDeletedCount = 0; long entityShellCount = 0; for (String type : entityType.getTypeAndAllSubTypes()) { entityActiveCount += activeEntityCount.get(type) == null ? 0 : activeEntityCount.get(type); entityDeletedCount += deletedEntityCount.get(type) == null ? 0 : deletedEntityCount.get(type); entityShellCount += shellEntityCount.get(type) == null ? 0 : shellEntityCount.get(type); } if (entityActiveCount > 0) { activeEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityActiveCount); } if (entityDeletedCount > 0) { deletedEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityDeletedCount); } if (entityShellCount > 0) { shellEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityShellCount); } } } if (classificationDefs != null) { for (AtlasClassificationDef classificationDef : classificationDefs) { long count = getTypeCount(classificationDef.getName(), ACTIVE); if (count > 0) { taggedEntityCount.put(classificationDef.getName(), count); } } } AtlasMetrics metrics = new AtlasMetrics(); metrics.addMetric(GENERAL, METRIC_COLLECTION_TIME, System.currentTimeMillis()); metrics.addMetric(GENERAL, METRIC_STATS, metricsUtil.getStats()); metrics.addMetric(GENERAL, METRIC_TYPE_COUNT, getAllTypesCount()); metrics.addMetric(GENERAL, METRIC_TAG_COUNT, getAllTagsCount()); metrics.addMetric(GENERAL, METRIC_TYPE_UNUSED_COUNT, unusedTypeCount); metrics.addMetric(GENERAL, METRIC_ENTITY_COUNT, totalEntities); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE, activeEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED, deletedEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL, shellEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE_INCL_SUBTYPES, activeEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED_INCL_SUBTYPES, deletedEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL_INCL_SUBTYPES, shellEntityCountTypeAndSubTypes); metrics.addMetric(TAG, METRIC_ENTITIES_PER_TAG, taggedEntityCount); metrics.addMetric(SYSTEM, METRIC_MEMORY, AtlasMetricJVMUtil.getMemoryDetails()); metrics.addMetric(SYSTEM, METRIC_OS, AtlasMetricJVMUtil.getSystemInfo()); metrics.addMetric(SYSTEM, METRIC_RUNTIME, AtlasMetricJVMUtil.getRuntimeInfo()); return metrics; } @Inject MetricsService(final AtlasGraph graph, final AtlasTypeRegistry typeRegistry, AtlasMetricsUtil metricsUtil); @SuppressWarnings("unchecked") @GraphTransaction AtlasMetrics getMetrics(); }
MetricsService { @SuppressWarnings("unchecked") @GraphTransaction public AtlasMetrics getMetrics() { final AtlasTypesDef typesDef = getTypesDef(); Collection<AtlasEntityDef> entityDefs = typesDef.getEntityDefs(); Collection<AtlasClassificationDef> classificationDefs = typesDef.getClassificationDefs(); Map<String, Long> activeEntityCount = new HashMap<>(); Map<String, Long> deletedEntityCount = new HashMap<>(); Map<String, Long> shellEntityCount = new HashMap<>(); Map<String, Long> taggedEntityCount = new HashMap<>(); Map<String, Long> activeEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> deletedEntityCountTypeAndSubTypes = new HashMap<>(); Map<String, Long> shellEntityCountTypeAndSubTypes = new HashMap<>(); long unusedTypeCount = 0; long totalEntities = 0; if (entityDefs != null) { for (AtlasEntityDef entityDef : entityDefs) { long activeCount = getTypeCount(entityDef.getName(), ACTIVE); long deletedCount = getTypeCount(entityDef.getName(), DELETED); long shellCount = getTypeShellCount(entityDef.getName()); if (activeCount > 0) { activeEntityCount.put(entityDef.getName(), activeCount); totalEntities += activeCount; } if (deletedCount > 0) { deletedEntityCount.put(entityDef.getName(), deletedCount); totalEntities += deletedCount; } if (activeCount == 0 && deletedCount == 0) { unusedTypeCount++; } if (shellCount > 0) { shellEntityCount.put(entityDef.getName(), shellCount); } } for (AtlasEntityDef entityDef : entityDefs) { AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entityDef.getName()); long entityActiveCount = 0; long entityDeletedCount = 0; long entityShellCount = 0; for (String type : entityType.getTypeAndAllSubTypes()) { entityActiveCount += activeEntityCount.get(type) == null ? 0 : activeEntityCount.get(type); entityDeletedCount += deletedEntityCount.get(type) == null ? 0 : deletedEntityCount.get(type); entityShellCount += shellEntityCount.get(type) == null ? 0 : shellEntityCount.get(type); } if (entityActiveCount > 0) { activeEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityActiveCount); } if (entityDeletedCount > 0) { deletedEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityDeletedCount); } if (entityShellCount > 0) { shellEntityCountTypeAndSubTypes.put(entityType.getTypeName(), entityShellCount); } } } if (classificationDefs != null) { for (AtlasClassificationDef classificationDef : classificationDefs) { long count = getTypeCount(classificationDef.getName(), ACTIVE); if (count > 0) { taggedEntityCount.put(classificationDef.getName(), count); } } } AtlasMetrics metrics = new AtlasMetrics(); metrics.addMetric(GENERAL, METRIC_COLLECTION_TIME, System.currentTimeMillis()); metrics.addMetric(GENERAL, METRIC_STATS, metricsUtil.getStats()); metrics.addMetric(GENERAL, METRIC_TYPE_COUNT, getAllTypesCount()); metrics.addMetric(GENERAL, METRIC_TAG_COUNT, getAllTagsCount()); metrics.addMetric(GENERAL, METRIC_TYPE_UNUSED_COUNT, unusedTypeCount); metrics.addMetric(GENERAL, METRIC_ENTITY_COUNT, totalEntities); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE, activeEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED, deletedEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL, shellEntityCount); metrics.addMetric(ENTITY, METRIC_ENTITY_ACTIVE_INCL_SUBTYPES, activeEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_DELETED_INCL_SUBTYPES, deletedEntityCountTypeAndSubTypes); metrics.addMetric(ENTITY, METRIC_ENTITY_SHELL_INCL_SUBTYPES, shellEntityCountTypeAndSubTypes); metrics.addMetric(TAG, METRIC_ENTITIES_PER_TAG, taggedEntityCount); metrics.addMetric(SYSTEM, METRIC_MEMORY, AtlasMetricJVMUtil.getMemoryDetails()); metrics.addMetric(SYSTEM, METRIC_OS, AtlasMetricJVMUtil.getSystemInfo()); metrics.addMetric(SYSTEM, METRIC_RUNTIME, AtlasMetricJVMUtil.getRuntimeInfo()); return metrics; } @Inject MetricsService(final AtlasGraph graph, final AtlasTypeRegistry typeRegistry, AtlasMetricsUtil metricsUtil); @SuppressWarnings("unchecked") @GraphTransaction AtlasMetrics getMetrics(); static final String TYPE; static final String TYPE_SUBTYPES; static final String ENTITY; static final String TAG; static final String GENERAL; static final String SYSTEM; }
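A hedged helper sketch for the MetricsService.getMetrics() rows above: it uses only the getMetrics(), getNumericMetric() and getMetric() calls that already appear in this record, and takes the category and metric-name strings as parameters rather than guessing where the METRIC_* constants are defined; the import paths are assumptions.

import java.util.Map;
import org.apache.atlas.model.metrics.AtlasMetrics; // package paths assumed
import org.apache.atlas.services.MetricsService;    // package path assumed

public class MetricsReadSketch {
    // Reads one scalar metric, e.g. the "general" bucket's entity-count key used in the test above.
    static Number readNumericMetric(MetricsService metricsService, String category, String metricName) {
        AtlasMetrics metrics = metricsService.getMetrics(); // @GraphTransaction: collected inside a graph transaction
        return metrics.getNumericMetric(category, metricName);
    }

    // Reads one map-valued metric, e.g. the per-tag entity counts checked by the test above.
    static Map<?, ?> readMapMetric(MetricsService metricsService, String category, String metricName) {
        return (Map<?, ?>) metricsService.getMetrics().getMetric(category, metricName);
    }
}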
@Test public void testMapTypeIsValidValue() { for (Object value : validValues) { assertTrue(intIntMapType.isValidValue(value), "value=" + value); } for (Object value : invalidValues) { assertFalse(intIntMapType.isValidValue(value), "value=" + value); } }
@Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Object> map = (Map<Object, Object>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } } } else { return false; } } return true; }
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Object> map = (Map<Object, Object>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } } } else { return false; } } return true; } }
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Object> map = (Map<Object, Object>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } } } else { return false; } } return true; } AtlasMapType(AtlasType keyType, AtlasType valueType); AtlasMapType(String keyTypeName, String valueTypeName, AtlasTypeRegistry typeRegistry); }
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Object> map = (Map<Object, Object>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } } } else { return false; } } return true; } AtlasMapType(AtlasType keyType, AtlasType valueType); AtlasMapType(String keyTypeName, String valueTypeName, AtlasTypeRegistry typeRegistry); String getKeyTypeName(); String getValueTypeName(); AtlasType getKeyType(); AtlasType getValueType(); void setKeyType(AtlasType keyType); @Override Map<Object, Object> createDefaultValue(); @Override boolean isValidValue(Object obj); @Override boolean areEqualValues(Object val1, Object val2, Map<String, String> guidAssignments); @Override boolean isValidValueForUpdate(Object obj); @Override Map<Object, Object> getNormalizedValue(Object obj); @Override Map<Object, Object> getNormalizedValueForUpdate(Object obj); @Override boolean validateValue(Object obj, String objName, List<String> messages); @Override boolean validateValueForUpdate(Object obj, String objName, List<String> messages); @Override AtlasType getTypeForAttribute(); }
AtlasMapType extends AtlasType { @Override public boolean isValidValue(Object obj) { if (obj != null) { if (obj instanceof Map) { Map<Object, Object> map = (Map<Object, Object>) obj; for (Map.Entry e : map.entrySet()) { if (!keyType.isValidValue(e.getKey()) || !valueType.isValidValue(e.getValue())) { return false; } } } else { return false; } } return true; } AtlasMapType(AtlasType keyType, AtlasType valueType); AtlasMapType(String keyTypeName, String valueTypeName, AtlasTypeRegistry typeRegistry); String getKeyTypeName(); String getValueTypeName(); AtlasType getKeyType(); AtlasType getValueType(); void setKeyType(AtlasType keyType); @Override Map<Object, Object> createDefaultValue(); @Override boolean isValidValue(Object obj); @Override boolean areEqualValues(Object val1, Object val2, Map<String, String> guidAssignments); @Override boolean isValidValueForUpdate(Object obj); @Override Map<Object, Object> getNormalizedValue(Object obj); @Override Map<Object, Object> getNormalizedValueForUpdate(Object obj); @Override boolean validateValue(Object obj, String objName, List<String> messages); @Override boolean validateValueForUpdate(Object obj, String objName, List<String> messages); @Override AtlasType getTypeForAttribute(); }
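A small hedged sketch for the AtlasMapType.isValidValue() rows above. It relies only on the two-argument constructor and isValidValue() listed in this record, and takes the element types as parameters so nothing else about the type system has to be assumed; the import paths are assumptions.

import org.apache.atlas.type.AtlasMapType; // package paths assumed
import org.apache.atlas.type.AtlasType;

public class MapTypeValidationSketch {
    // null is accepted, a non-Map value is rejected, and for a Map every key and value
    // must pass the element types' own isValidValue() checks.
    static boolean isValidMapValue(AtlasType keyType, AtlasType valueType, Object candidate) {
        AtlasMapType mapType = new AtlasMapType(keyType, valueType);
        return mapType.isValidValue(candidate);
    }
}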
@Test(dependsOnMethods = "filterInternalType") public void createsNewProfile() throws AtlasBaseException { for (int i = 0; i < NUM_USERS; i++) { AtlasUserProfile expected = getAtlasUserProfile(i); AtlasUserProfile actual = userProfileService.saveUserProfile(expected); assertNotNull(actual); assertEquals(expected.getName(), actual.getName()); assertEquals(expected.getFullName(), actual.getFullName()); assertNotNull(actual.getGuid()); } }
public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); }
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } }
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } @Inject UserProfileService(DataAccess dataAccess); }
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch); AtlasUserSavedSearch updateSavedSearch(AtlasUserSavedSearch savedSearch); List<AtlasUserSavedSearch> getSavedSearches(String userName); AtlasUserSavedSearch getSavedSearch(String userName, String searchName); AtlasUserSavedSearch getSavedSearch(String guid); void deleteUserProfile(String userName); void deleteSavedSearch(String guid); void deleteSearchBySearchName(String userName, String searchName); }
UserProfileService { public AtlasUserProfile saveUserProfile(AtlasUserProfile profile) throws AtlasBaseException { return dataAccess.save(profile); } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch); AtlasUserSavedSearch updateSavedSearch(AtlasUserSavedSearch savedSearch); List<AtlasUserSavedSearch> getSavedSearches(String userName); AtlasUserSavedSearch getSavedSearch(String userName, String searchName); AtlasUserSavedSearch getSavedSearch(String guid); void deleteUserProfile(String userName); void deleteSavedSearch(String guid); void deleteSearchBySearchName(String userName, String searchName); }
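A hedged sketch for the UserProfileService.saveUserProfile() rows above. The single-argument AtlasUserProfile constructor is the one used by addSavedSearch() in the later rows; the UserProfileService instance is assumed to be already constructed with its injected DataAccess, and the import paths are assumptions.

import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.profile.AtlasUserProfile;          // package path assumed
import org.apache.atlas.repository.userprofile.UserProfileService; // package path assumed

public class SaveUserProfileSketch {
    // Persists a new, empty profile for the given user; the returned copy carries the assigned GUID.
    static AtlasUserProfile createProfile(UserProfileService userProfileService, String userName)
            throws AtlasBaseException {
        AtlasUserProfile profile = new AtlasUserProfile(userName);
        return userProfileService.saveUserProfile(profile); // delegates to DataAccess.save()
    }
}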
@Test(dependsOnMethods = "saveSearchesForUser", expectedExceptions = AtlasBaseException.class) public void attemptToAddExistingSearch() throws AtlasBaseException { String userName = getIndexBasedUserName(0); SearchParameters expectedSearchParameter = getActualSearchParameters(); for (int j = 0; j < NUM_SEARCHES; j++) { String queryName = getIndexBasedQueryName(j); AtlasUserSavedSearch expected = getDefaultSavedSearch(userName, queryName, expectedSearchParameter); AtlasUserSavedSearch actual = userProfileService.addSavedSearch(expected); assertNotNull(actual); assertNotNull(actual.getGuid()); assertEquals(actual.getOwnerName(), expected.getOwnerName()); assertEquals(actual.getName(), expected.getName()); assertEquals(actual.getSearchType(), expected.getSearchType()); assertEquals(actual.getSearchParameters(), expected.getSearchParameters()); } }
public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { userProfile = new AtlasUserProfile(userName); } checkIfQueryAlreadyExists(savedSearch, userProfile); userProfile.getSavedSearches().add(savedSearch); userProfile = dataAccess.save(userProfile); for (AtlasUserSavedSearch s : userProfile.getSavedSearches()) { if(s.getName().equals(savedSearch.getName())) { return s; } } return savedSearch; }
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { userProfile = new AtlasUserProfile(userName); } checkIfQueryAlreadyExists(savedSearch, userProfile); userProfile.getSavedSearches().add(savedSearch); userProfile = dataAccess.save(userProfile); for (AtlasUserSavedSearch s : userProfile.getSavedSearches()) { if(s.getName().equals(savedSearch.getName())) { return s; } } return savedSearch; } }
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { userProfile = new AtlasUserProfile(userName); } checkIfQueryAlreadyExists(savedSearch, userProfile); userProfile.getSavedSearches().add(savedSearch); userProfile = dataAccess.save(userProfile); for (AtlasUserSavedSearch s : userProfile.getSavedSearches()) { if(s.getName().equals(savedSearch.getName())) { return s; } } return savedSearch; } @Inject UserProfileService(DataAccess dataAccess); }
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { userProfile = new AtlasUserProfile(userName); } checkIfQueryAlreadyExists(savedSearch, userProfile); userProfile.getSavedSearches().add(savedSearch); userProfile = dataAccess.save(userProfile); for (AtlasUserSavedSearch s : userProfile.getSavedSearches()) { if(s.getName().equals(savedSearch.getName())) { return s; } } return savedSearch; } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch); AtlasUserSavedSearch updateSavedSearch(AtlasUserSavedSearch savedSearch); List<AtlasUserSavedSearch> getSavedSearches(String userName); AtlasUserSavedSearch getSavedSearch(String userName, String searchName); AtlasUserSavedSearch getSavedSearch(String guid); void deleteUserProfile(String userName); void deleteSavedSearch(String guid); void deleteSearchBySearchName(String userName, String searchName); }
UserProfileService { public AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch) throws AtlasBaseException { String userName = savedSearch.getOwnerName(); AtlasUserProfile userProfile = null; try { userProfile = getUserProfile(userName); } catch (AtlasBaseException excp) { } if (userProfile == null) { userProfile = new AtlasUserProfile(userName); } checkIfQueryAlreadyExists(savedSearch, userProfile); userProfile.getSavedSearches().add(savedSearch); userProfile = dataAccess.save(userProfile); for (AtlasUserSavedSearch s : userProfile.getSavedSearches()) { if(s.getName().equals(savedSearch.getName())) { return s; } } return savedSearch; } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch); AtlasUserSavedSearch updateSavedSearch(AtlasUserSavedSearch savedSearch); List<AtlasUserSavedSearch> getSavedSearches(String userName); AtlasUserSavedSearch getSavedSearch(String userName, String searchName); AtlasUserSavedSearch getSavedSearch(String guid); void deleteUserProfile(String userName); void deleteSavedSearch(String guid); void deleteSearchBySearchName(String userName, String searchName); }
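A hedged sketch around the addSavedSearch() rows above, illustrating the duplicate-name behaviour the attemptToAddExistingSearch test depends on; the helper name and import paths are assumptions.

import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.profile.AtlasUserSavedSearch;        // package path assumed
import org.apache.atlas.repository.userprofile.UserProfileService; // package path assumed

public class AddSavedSearchSketch {
    // The first call for an owner creates the profile and returns the stored search (GUID populated);
    // a second search with the same name for that owner makes addSavedSearch() throw.
    static AtlasUserSavedSearch addOrNull(UserProfileService service, AtlasUserSavedSearch search) {
        try {
            return service.addSavedSearch(search);
        } catch (AtlasBaseException duplicateOrFailure) {
            return null; // duplicate name for this owner, or the save itself failed
        }
    }
}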
@Test(dependsOnMethods = "attemptToAddExistingSearch") public void verifySavedSearchesForUser() throws AtlasBaseException { String userName = getIndexBasedUserName(0); List<AtlasUserSavedSearch> searches = userProfileService.getSavedSearches(userName); List<String> names = getIndexBasedQueryNamesList(); for (int i = 0; i < names.size(); i++) { assertTrue(names.contains(searches.get(i).getName()), searches.get(i).getName() + " failed!"); } }
public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; }
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } }
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } @Inject UserProfileService(DataAccess dataAccess); }
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch); AtlasUserSavedSearch updateSavedSearch(AtlasUserSavedSearch savedSearch); List<AtlasUserSavedSearch> getSavedSearches(String userName); AtlasUserSavedSearch getSavedSearch(String userName, String searchName); AtlasUserSavedSearch getSavedSearch(String guid); void deleteUserProfile(String userName); void deleteSavedSearch(String guid); void deleteSearchBySearchName(String userName, String searchName); }
UserProfileService { public List<AtlasUserSavedSearch> getSavedSearches(String userName) throws AtlasBaseException { AtlasUserProfile profile = null; try { profile = getUserProfile(userName); } catch (AtlasBaseException excp) { } return (profile != null) ? profile.getSavedSearches() : null; } @Inject UserProfileService(DataAccess dataAccess); AtlasUserProfile saveUserProfile(AtlasUserProfile profile); AtlasUserProfile getUserProfile(String userName); AtlasUserSavedSearch addSavedSearch(AtlasUserSavedSearch savedSearch); AtlasUserSavedSearch updateSavedSearch(AtlasUserSavedSearch savedSearch); List<AtlasUserSavedSearch> getSavedSearches(String userName); AtlasUserSavedSearch getSavedSearch(String userName, String searchName); AtlasUserSavedSearch getSavedSearch(String guid); void deleteUserProfile(String userName); void deleteSavedSearch(String guid); void deleteSearchBySearchName(String userName, String searchName); }
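A closing hedged sketch for the getSavedSearches() rows: it collects the saved-search names for one user and handles the null return explicitly, since the focal method returns null when the user has no profile yet; the import paths are assumptions.

import java.util.ArrayList;
import java.util.List;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.profile.AtlasUserSavedSearch;        // package path assumed
import org.apache.atlas.repository.userprofile.UserProfileService; // package path assumed

public class ListSavedSearchesSketch {
    static List<String> savedSearchNames(UserProfileService service, String userName) throws AtlasBaseException {
        List<AtlasUserSavedSearch> searches = service.getSavedSearches(userName);
        List<String> names = new ArrayList<>();
        if (searches != null) { // null when no profile exists for this user
            for (AtlasUserSavedSearch search : searches) {
                names.add(search.getName());
            }
        }
        return names;
    }
}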