Dataset columns (stringlengths shown as a range):
  target            20 – 113k
  src_fm            11 – 86.3k
  src_fm_fc         21 – 86.4k
  src_fm_fc_co      30 – 86.4k
  src_fm_fc_ms      42 – 86.8k
  src_fm_fc_ms_ff   43 – 86.8k
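Each record below appears to list one value per column, in the order above: a test case (target) followed by the focal source code at increasing levels of surrounding context. A minimal sketch for loading and inspecting such a dataset is given here, assuming the Hugging Face datasets library; the repository id "user/dataset-name" is a placeholder, not the actual one.

# Minimal sketch: load one split and print the start of every column of the first record.
# "user/dataset-name" is a hypothetical placeholder for the real dataset repository id.
from datasets import load_dataset

ds = load_dataset("user/dataset-name", split="train")
example = ds[0]

# Each record pairs a test case ("target") with the focal code at
# progressively larger context levels (src_fm ... src_fm_fc_ms_ff).
columns = ["target", "src_fm", "src_fm_fc", "src_fm_fc_co",
           "src_fm_fc_ms", "src_fm_fc_ms_ff"]
for column in columns:
    print(f"{column}: {example[column][:80]}...")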
@Test public void testPut() { TestActorRef<MCacheDb> actorRef = TestActorRef.create(actorSystem, Props.create(MCacheDb.class)); actorRef.tell(new SetRequest("key", "value"), ActorRef.noSender()); MCacheDb mCacheDb = actorRef.underlyingActor(); Assert.assertEquals(mCacheDb.getValue("key"), "value"); }
public String getValue(String key) { return this.map.get(key); }
MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } }
MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } }
MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } @Override Receive createReceive(); String getValue(String key); }
MCacheDb extends AbstractActor { public String getValue(String key) { return this.map.get(key); } @Override Receive createReceive(); String getValue(String key); }
@Test public void testSayHelloProxy() { Person person = new Person(); person.setFirstName("Jane"); person.setLastName("Doe"); Greeting greeting = helloWorldRequesterProxy.sayHello(person); assertEquals("Hello Jane Doe!", greeting.getText()); }
@Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting response = factory.createGreeting(); String greeting = "Hello " + firstName + " " + lasttName + "!"; LOGGER.info("greeting={}", greeting); response.setText(greeting); return response; }
HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting response = factory.createGreeting(); String greeting = "Hello " + firstName + " " + lasttName + "!"; LOGGER.info("greeting={}", greeting); response.setText(greeting); return response; } }
HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting response = factory.createGreeting(); String greeting = "Hello " + firstName + " " + lasttName + "!"; LOGGER.info("greeting={}", greeting); response.setText(greeting); return response; } }
HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting response = factory.createGreeting(); String greeting = "Hello " + firstName + " " + lasttName + "!"; LOGGER.info("greeting={}", greeting); response.setText(greeting); return response; } @Override Greeting sayHello(Person person); }
HelloWorldImpl implements HelloWorldPortType { @Override public Greeting sayHello(Person person) { String firstName = person.getFirstName(); LOGGER.info("firstName={}", firstName); String lasttName = person.getLastName(); LOGGER.info("lastName={}", lasttName); ObjectFactory factory = new ObjectFactory(); Greeting response = factory.createGreeting(); String greeting = "Hello " + firstName + " " + lasttName + "!"; LOGGER.info("greeting={}", greeting); response.setText(greeting); return response; } @Override Greeting sayHello(Person person); }
@Test public void testResolveWithoutAnyServiceMfaAttributes() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); final DefaultRegisteredServiceMfaRoleProcessorImpl resolver = new DefaultRegisteredServiceMfaRoleProcessorImpl( getMFWASF(was), getAMCP(), getServicesManager(rswa)); final List<MultiFactorAuthenticationRequestContext> result = resolver.resolve(auth, was); assertNotNull(result); assertEquals(0, result.size()); }
public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
@Test public void testResolveWithoutIncompleteServiceMfaAttributes() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); DefaultRegisteredServiceProperty prop = new DefaultRegisteredServiceProperty(); prop.setValues(Collections.singleton(CAS_AUTHN_METHOD)); rswa.getProperties().put(MultiFactorAuthenticationSupportingWebApplicationService.CONST_PARAM_AUTHN_METHOD, prop); prop = new DefaultRegisteredServiceProperty(); prop.setValues(Collections.singleton(MEMBER_OF_VALUE)); rswa.getProperties().put(RegisteredServiceMfaRoleProcessor.MFA_ATTRIBUTE_PATTERN, prop); final DefaultRegisteredServiceMfaRoleProcessorImpl resolver = new DefaultRegisteredServiceMfaRoleProcessorImpl( getMFWASF(was), getAMCP(), getServicesManager(rswa)); final List<MultiFactorAuthenticationRequestContext> result = resolver.resolve(auth, was); assertNotNull(result); assertEquals(0, result.size()); }
public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
@Test public void testResolveServiceWithMfaAttributesUserInRole() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); DefaultRegisteredServiceProperty prop = new DefaultRegisteredServiceProperty(); prop.setValues(Collections.singleton(CAS_AUTHN_METHOD)); rswa.getProperties().put(MultiFactorAuthenticationSupportingWebApplicationService.CONST_PARAM_AUTHN_METHOD, prop); prop = new DefaultRegisteredServiceProperty(); prop.setValues(Collections.singleton(MEMBER_OF)); rswa.getProperties().put(RegisteredServiceMfaRoleProcessor.MFA_ATTRIBUTE_NAME, prop); prop = new DefaultRegisteredServiceProperty(); prop.setValues(Collections.singleton(MEMBER_OF_VALUE)); rswa.getProperties().put(RegisteredServiceMfaRoleProcessor.MFA_ATTRIBUTE_PATTERN, prop); final DefaultRegisteredServiceMfaRoleProcessorImpl resolver = new DefaultRegisteredServiceMfaRoleProcessorImpl( getMFWASF(was), getAMCP(), getServicesManager(rswa)); final List<MultiFactorAuthenticationRequestContext> result = resolver.resolve(auth, was); assertNotNull(result); assertEquals(CAS_AUTHN_METHOD, result.get(0).getMfaService().getAuthenticationMethod()); }
public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
@Test public void testResolveServiceWithOnlyAuthnMethodAttribute() throws Exception { final WebApplicationService was = getTargetService(); final Authentication auth = getAuthentication(true); final RegisteredService rswa = TestUtils.getRegisteredService("test1"); final DefaultRegisteredServiceProperty prop = new DefaultRegisteredServiceProperty(); prop.setValues(Collections.singleton(CAS_AUTHN_METHOD)); rswa.getProperties().put(MultiFactorAuthenticationSupportingWebApplicationService.CONST_PARAM_AUTHN_METHOD, prop); final DefaultRegisteredServiceMfaRoleProcessorImpl resolver = new DefaultRegisteredServiceMfaRoleProcessorImpl( getMFWASF(was), getAMCP(), getServicesManager(rswa)); final List<MultiFactorAuthenticationRequestContext> result = resolver.resolve(auth, was); assertNotNull(result); assertEquals(0, result.size()); }
public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
DefaultRegisteredServiceMfaRoleProcessorImpl implements RegisteredServiceMfaRoleProcessor { public List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService) { String authenticationMethodAttributeName = null; final List<MultiFactorAuthenticationRequestContext> list = new ArrayList<>(); if (authentication != null && targetService != null) { final ServiceMfaData serviceMfaData = getServicesAuthenticationData(targetService); if (serviceMfaData == null || !serviceMfaData.isValid()) { logger.debug("No specific mfa role service attributes found"); return list; } logger.debug("Found MFA Role: {}", serviceMfaData); authenticationMethodAttributeName = serviceMfaData.attributeName; final Object mfaAttributeValueAsObject = authentication.getPrincipal().getAttributes().get(serviceMfaData.attributeName); if (mfaAttributeValueAsObject != null) { if (mfaAttributeValueAsObject instanceof String) { final String mfaAttributeValue = mfaAttributeValueAsObject.toString(); final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } else if (mfaAttributeValueAsObject instanceof List) { final List<String> mfaAttributeValues = (List<String>) mfaAttributeValueAsObject; for (final String mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, mfaAttributeValue, targetService); if (ctx != null) { list.add(ctx); } } } else if (mfaAttributeValueAsObject instanceof Collection) { final Collection mfaAttributeValues = (Collection) mfaAttributeValueAsObject; for (final Object mfaAttributeValue : mfaAttributeValues) { final MultiFactorAuthenticationRequestContext ctx = getMfaRequestContext( serviceMfaData, String.valueOf(mfaAttributeValue), targetService); if (ctx != null) { list.add(ctx); } } } else { logger.debug("No MFA attribute found."); } } } if (list.isEmpty()) { logger.debug("No multifactor authentication requests could be resolved based on [{}].", authenticationMethodAttributeName); return null; } return list; } DefaultRegisteredServiceMfaRoleProcessorImpl( final MultiFactorWebApplicationServiceFactory mfaServiceFactory, final AuthenticationMethodConfigurationProvider authenticationMethodConfiguration, final ServicesManager servicesManager); List<MultiFactorAuthenticationRequestContext> resolve(@NotNull final Authentication authentication, @NotNull final WebApplicationService targetService); }
@Test public void shouldRemoveRevisionsOnly() throws Exception { final int NUMBER_OF_REVISIONS = 3; final int NUMBER_OF_RESPONSES = 2; RevisionInformation revisionInformation = new RevisionInformation("DATASET", DATA_PROVIDER, SOURCE + REPRESENTATION_NAME, REVISION_NAME, REVISION_PROVIDER, getUTCDateString(date)); revisionRemoverJob.setRevisionInformation(revisionInformation); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, date); List<Representation> representations = new ArrayList<>(1); representations.add(representation); List<Revision> revisions = getRevisions(NUMBER_OF_REVISIONS); representation.setRevisions(revisions); ResultSlice<CloudTagsResponse> resultSlice = getCloudTagsResponseResultSlice(NUMBER_OF_RESPONSES); when(dataSetServiceClient.getDataSetRevisionsChunk(anyString(), anyString(), anyString(), anyString(), anyString(), anyString(), anyString(), anyInt())).thenReturn(resultSlice); when(recordServiceClient.getRepresentationsByRevision(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, REVISION_NAME, REVISION_PROVIDER, getUTCDateString(date))).thenReturn(Arrays.asList(representation)); when(recordServiceClient.getRepresentationsByRevision(SOURCE + CLOUD_ID2, SOURCE + REPRESENTATION_NAME, REVISION_NAME, REVISION_PROVIDER, getUTCDateString(date))).thenReturn(Arrays.asList(representation)); Thread thread = new Thread(revisionRemoverJob); thread.start(); thread.join(); verify(revisionServiceClient, times(NUMBER_OF_RESPONSES * NUMBER_OF_REVISIONS)).deleteRevision(anyString(), anyString(), anyString(), anyString(), anyString(), anyString()); verify(recordServiceClient, times(0)).deleteRepresentation(anyString(), anyString(), anyString()); }
void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; }
RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } }
RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } RevisionRemoverJob(DataSetServiceClient dataSetServiceClient, RecordServiceClient recordServiceClient, RevisionInformation revisionInformation, RevisionServiceClient revisionServiceClient); }
RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } RevisionRemoverJob(DataSetServiceClient dataSetServiceClient, RecordServiceClient recordServiceClient, RevisionInformation revisionInformation, RevisionServiceClient revisionServiceClient); @Override void run(); }
RevisionRemoverJob implements Runnable { void setRevisionInformation(RevisionInformation revisionInformation) { this.revisionInformation = revisionInformation; } RevisionRemoverJob(DataSetServiceClient dataSetServiceClient, RecordServiceClient recordServiceClient, RevisionInformation revisionInformation, RevisionServiceClient revisionServiceClient); @Override void run(); }
@Test public void shouldEmitSameTupleWhenNoResourcesHasToBeChecked() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = prepareTupleWithLinksCountEqualsToZero(); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(1)).emit( eq("NotificationStream"), eq(anchorTuple), captor.capture()); validateCapturedValues(captor); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
@Test public void shouldCheckOneLinkWithoutEmittingTuple() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = prepareRandomTuple(); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(0)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList()); verify(linkChecker, times(1)).performLinkChecking(tuple.getParameter(PluginParameterKeys.RESOURCE_URL)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
@Test public void shouldEmitTupleAfterCheckingAllResourcesFromFile() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple tuple = prepareRandomTuple(); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(0)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList()); verify(linkChecker, times(1)).performLinkChecking(tuple.getParameter(PluginParameterKeys.RESOURCE_URL)); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(0)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList()); verify(linkChecker, times(2)).performLinkChecking(tuple.getParameter(PluginParameterKeys.RESOURCE_URL)); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(0)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList()); verify(linkChecker, times(3)).performLinkChecking(tuple.getParameter(PluginParameterKeys.RESOURCE_URL)); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(0)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList()); verify(linkChecker, times(4)).performLinkChecking(tuple.getParameter(PluginParameterKeys.RESOURCE_URL)); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(1)).emit(eq("NotificationStream"), any(Tuple.class), Mockito.anyList()); verify(linkChecker, times(5)).performLinkChecking(Mockito.anyString()); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
@Test public void shouldEmitTupleWithErrorIncluded() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); doThrow(new LinkCheckingException(new Throwable())).when(linkChecker).performLinkChecking(Mockito.anyString()); StormTaskTuple tuple = prepareRandomTuple(); linkCheckBolt.execute(anchorTuple, tuple); linkCheckBolt.execute(anchorTuple, tuple); linkCheckBolt.execute(anchorTuple, tuple); linkCheckBolt.execute(anchorTuple, tuple); linkCheckBolt.execute(anchorTuple, tuple); verify(outputCollector, times(1)).emit(eq("NotificationStream"), any(Tuple.class), captor.capture()); validateCapturedValuesForError(captor); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
LinkCheckBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple tuple) { ResourceInfo resourceInfo = readResourceInfoFromTuple(tuple); if (!hasLinksForCheck(resourceInfo)) { emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "The EDM file has no resources", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); outputCollector.ack(anchorTuple); } else { FileInfo edmFile = checkProvidedLink(tuple, resourceInfo); edmFile.addSourceTuple(anchorTuple); if (isFileFullyProcessed(edmFile)) { cache.remove(edmFile.fileUrl); if (edmFile.errors == null || edmFile.errors.isEmpty()) emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); else emitSuccessNotification(anchorTuple, tuple.getTaskId(), tuple.getFileUrl(), "", "", "", "resource exception", edmFile.errors, StormTaskTupleHelper.getRecordProcessingStartTime(tuple)); ackAllSourceTuplesForFile(edmFile); } } } @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple tuple); }
@Test public void enrichEdmInternalSuccessfully() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/Item_35834473_test.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, new HashMap<String, String>(), null); String fileContent = new String(tuple.getFileData()); when(enrichmentWorker.process(eq(fileContent))).thenReturn("enriched file content"); enrichmentBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(1)).emit(Mockito.any(List.class)); Mockito.verify(outputCollector, Mockito.times(0)).emit(Mockito.eq(AbstractDpsBolt.NOTIFICATION_STREAM_NAME), Mockito.any(List.class)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } EnrichmentBolt(String dereferenceURL, String enrichmentURL); }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } EnrichmentBolt(String dereferenceURL, String enrichmentURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } EnrichmentBolt(String dereferenceURL, String enrichmentURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void sendErrorNotificationWhenTheEnrichmentFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); byte[] FILE_DATA = Files.readAllBytes(Paths.get("src/test/resources/example1.xml")); StormTaskTuple tuple = new StormTaskTuple(TASK_ID, TASK_NAME, SOURCE_VERSION_URL, FILE_DATA, prepareStormTaskTupleParameters(), null); String fileContent = new String(tuple.getFileData()); String errorMessage = "Dereference or Enrichment Exception"; given(enrichmentWorker.process(eq(fileContent))).willThrow(new DereferenceOrEnrichException(errorMessage, new Throwable())); enrichmentBolt.execute(anchorTuple, tuple); Mockito.verify(outputCollector, Mockito.times(0)).emit(Mockito.any(List.class)); Mockito.verify(outputCollector, Mockito.times(1)).emit(Mockito.eq(AbstractDpsBolt.NOTIFICATION_STREAM_NAME), any(Tuple.class), captor.capture()); Values capturedValues = captor.getValue(); Map val = (Map) capturedValues.get(2); Assert.assertTrue(val.get("additionalInfo").toString().contains("emote Enrichment/dereference service caused the problem!. The full error:")); Assert.assertTrue(val.get("additionalInfo").toString().contains(errorMessage)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } EnrichmentBolt(String dereferenceURL, String enrichmentURL); }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } EnrichmentBolt(String dereferenceURL, String enrichmentURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
EnrichmentBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try { String fileContent = new String(stormTaskTuple.getFileData()); LOGGER.info("starting enrichment on {} .....", stormTaskTuple.getFileUrl()); String output = enrichmentWorker.process(fileContent); LOGGER.info("Finishing enrichment on {} .....", stormTaskTuple.getFileUrl()); emitEnrichedContent(anchorTuple, stormTaskTuple, output); } catch (Exception e) { LOGGER.error("Exception while Enriching/dereference", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Remote Enrichment/dereference service caused the problem!. The full error: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } EnrichmentBolt(String dereferenceURL, String enrichmentURL); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void harvestingForAllParametersSpecified() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenReturn(fileContentAsStream); StormTaskTuple task = taskWithAllNeededParameters(); StormTaskTuple spiedTask = spy(task); recordHarvestingBolt.execute(anchorTuple, spiedTask); verifySuccessfulEmit(); verify(spiedTask).setFileData(Mockito.any(InputStream.class)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldHarvestRecordInEDMAndExtractIdentifiers() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenReturn(fileContentAsStream); StormTaskTuple task = taskWithAllNeededParameters(); StormTaskTuple spiedTask = spy(task); recordHarvestingBolt.execute(anchorTuple, spiedTask); verifySuccessfulEmit(); verify(spiedTask).setFileData(Mockito.any(InputStream.class)); assertEquals("/2020739_Ag_EU_CARARE_2Cultur/object_DCU_24927017", spiedTask.getParameter(PluginParameterKeys.CLOUD_LOCAL_IDENTIFIER)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldHarvestRecordInEDMAndNotUseHeaderIdentifierIfParameterIsDifferentThanTrue() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenReturn(fileContentAsStream); StormTaskTuple task = taskWithGivenValueOfUseHeaderIdentifiersParameter("blablaba"); StormTaskTuple spiedTask = spy(task); recordHarvestingBolt.execute(anchorTuple, spiedTask); verifySuccessfulEmit(); verify(spiedTask).setFileData(Mockito.any(InputStream.class)); assertEquals("http: assertEquals("/2020739_Ag_EU_CARARE_2Cultur/object_DCU_24927017", spiedTask.getParameter(PluginParameterKeys.CLOUD_LOCAL_IDENTIFIER)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
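The RecordHarvestingBolt tests in this section all share one arrangement: the harvester is a Mockito mock that returns a classpath XML resource as the record stream, and the input tuple is wrapped in spy(...) so that setFileData and the identifier parameters can be asserted afterwards. A minimal, self-contained sketch of that arrangement follows; HarvesterLike is a hypothetical stand-in modelled only on the harvestRecord call visible above, not the project's real harvester interface.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.InputStream;
import javax.xml.xpath.XPathExpression;

class HarvesterStubSketch {

    // Hypothetical stand-in for the harvester used by RecordHarvestingBolt;
    // only the method shape visible in the snippets above is modelled.
    interface HarvesterLike {
        InputStream harvestRecord(String endpoint, String recordId, String metadataPrefix,
                                  XPathExpression expr, XPathExpression isDeletedExpr) throws Exception;
    }

    // Assumption: test resources such as /sampleEDMRecord.xml live on the classpath.
    static InputStream sampleRecord() {
        return HarvesterStubSketch.class.getResourceAsStream("/sampleEDMRecord.xml");
    }

    // Mirrors the when(...).thenReturn(...) stubbing used by the tests above:
    // any endpoint/record/prefix combination yields the sample EDM record.
    static HarvesterLike stubbedHarvester() throws Exception {
        HarvesterLike harvester = mock(HarvesterLike.class);
        when(harvester.harvestRecord(anyString(), anyString(), anyString(),
                any(XPathExpression.class), any(XPathExpression.class)))
                .thenReturn(sampleRecord());
        return harvester;
    }
}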
@Test public void shouldHarvestRecordInEDMAndUseHeaderIdentifierIfSpecifiedInTaskParameters() throws IOException, HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenReturn(fileContentAsStream); StormTaskTuple task = taskWithGivenValueOfUseHeaderIdentifiersParameter("true"); StormTaskTuple spiedTask = spy(task); recordHarvestingBolt.execute(anchorTuple, spiedTask); verifySuccessfulEmit(); verify(spiedTask).setFileData(Mockito.any(InputStream.class)); assertNull( spiedTask.getParameter(PluginParameterKeys.ADDITIONAL_LOCAL_IDENTIFIER)); assertEquals("http: }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
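The verifySuccessfulEmit() helper used by these tests is not shown in this section. A plausible sketch, assuming the bolt's outputCollector field is an injected Mockito mock of org.apache.storm.task.OutputCollector, is given below; it only expresses that the success path produces exactly one anchored emit on the default stream followed by an ack of the anchor tuple.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.tuple.Tuple;

class EmitVerificationSketch {

    // Assumption: the test injects this mock into the bolt under test.
    OutputCollector outputCollector;

    // Success path: one anchored emit on the default stream, plus an ack of the anchor.
    void verifySuccessfulEmit() {
        verify(outputCollector, times(1)).emit(any(Tuple.class), anyList());
        verify(outputCollector, times(1)).ack(any(Tuple.class));
    }
}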
@Test public void shouldInvokeAllTheRemovalStepsExcludingErrorReports() { removerInvoker.executeInvokerForSingleTask(TASK_ID, false); verify(remover, times(1)).removeNotifications((eq(TASK_ID))); verify(remover, times(1)).removeStatistics((eq(TASK_ID))); verify(remover, times(0)).removeErrorReports((eq(TASK_ID))); }
public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } RemoverInvoker(Remover remover); }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } RemoverInvoker(Remover remover); void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors); void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors); }
RemoverInvoker { public void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors) { remover.removeNotifications(taskId); LOGGER.info("Logs for task Id:" + taskId + " were removed successfully"); LOGGER.info("Removing statistics for:" + taskId + " was started. This step could take times depending on the size of the task"); remover.removeStatistics(taskId); LOGGER.info("Statistics for task Id:" + taskId + " were removed successfully"); if (shouldRemoveErrors) { remover.removeErrorReports(taskId); LOGGER.info("Error reports for task Id:" + taskId + " were removed successfully"); } } RemoverInvoker(Remover remover); void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors); void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors); }
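The RemoverInvoker test above covers the shouldRemoveErrors == false branch of executeInvokerForSingleTask. The complementary branch can be exercised with the same Mockito pattern, as sketched below; the Remover and RemoverInvoker types are taken from the snippets above, their package imports are project specific and omitted, and the task id is a hypothetical value.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

class RemoverInvokerErrorBranchSketch {

    void shouldAlsoRemoveErrorReportsWhenRequested() {
        long taskId = 1234L; // hypothetical task id
        Remover remover = mock(Remover.class);
        RemoverInvoker removerInvoker = new RemoverInvoker(remover);

        removerInvoker.executeInvokerForSingleTask(taskId, true);

        // With shouldRemoveErrors == true, all three removal steps must run.
        verify(remover, times(1)).removeNotifications(taskId);
        verify(remover, times(1)).removeStatistics(taskId);
        verify(remover, times(1)).removeErrorReports(taskId);
    }
}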
@Test public void shouldHarvestRecordInEDMAndUseHeaderIdentifierAndTrimItIfSpecifiedInTaskParameters() throws IOException, HarvesterException { InputStream fileContentAsStream = getFileContentAsStream("/sampleEDMRecord.xml"); Tuple anchorTuple = mock(TupleImpl.class); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenReturn(fileContentAsStream); StormTaskTuple task = taskWithGivenValueOfUseHeaderIdentifiersAndTrimmingPrefix("true"); StormTaskTuple spiedTask = spy(task); recordHarvestingBolt.execute(anchorTuple, spiedTask); verifySuccessfulEmit(); verify(spiedTask).setFileData(Mockito.any(InputStream.class)); assertNull(spiedTask.getParameter(PluginParameterKeys.ADDITIONAL_LOCAL_IDENTIFIER)); assertEquals("/item/2064203/o_aj_kk_tei_3", spiedTask.getParameter(PluginParameterKeys.CLOUD_LOCAL_IDENTIFIER)); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
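trimLocalId(...) itself is not shown in this section. Judging from the expected value /item/2064203/o_aj_kk_tei_3 in the test above, it strips a task-configured prefix from the OAI header identifier before it is stored as CLOUD_LOCAL_IDENTIFIER. A minimal sketch of that idea follows; the method body, the example prefix and the raw identifier are all hypothetical and not the project's implementation.

// Hypothetical illustration of a prefix-trimming step, not the bolt's actual code.
final class LocalIdTrimSketch {

    static String trimLocalId(String rawHeaderIdentifier, String prefixToTrim) {
        if (prefixToTrim != null && rawHeaderIdentifier.startsWith(prefixToTrim)) {
            return rawHeaderIdentifier.substring(prefixToTrim.length());
        }
        return rawHeaderIdentifier;
    }

    public static void main(String[] args) {
        // Example values are invented for illustration only.
        String trimmed = trimLocalId("oai:example.org:/item/2064203/o_aj_kk_tei_3", "oai:example.org:");
        System.out.println(trimmed); // prints /item/2064203/o_aj_kk_tei_3
    }
}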
@Test public void shouldEmitErrorOnHarvestingExceptionWhenCannotExctractEuropeanaIdFromEDM() throws HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); InputStream fileContentAsStream = getFileContentAsStream("/corruptedEDMRecord.xml"); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenReturn(fileContentAsStream); StormTaskTuple task = taskWithAllNeededParameters(); StormTaskTuple spiedTask = spy(task); recordHarvestingBolt.execute(anchorTuple, spiedTask); verifyErrorEmit(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
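The verifyErrorEmit() helper is likewise not shown here. Assuming emitErrorNotification in AbstractDpsBolt ends up as an anchored emit on a dedicated, named notification stream (an assumption about code outside this section), an error-path verification could look like the sketch below: one emit on a named stream and no emit of the record on the default stream.

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.tuple.Tuple;

class ErrorEmitVerificationSketch {

    // Assumption: the same mock collector that the bolt under test writes to.
    OutputCollector outputCollector;

    void verifyErrorEmit() {
        // Error notification goes to a named stream; the record itself is never emitted.
        verify(outputCollector).emit(anyString(), any(Tuple.class), anyList());
        verify(outputCollector, never()).emit(any(Tuple.class), anyList());
    }
}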
@Test public void shouldEmitErrorOnHarvestingException() throws HarvesterException { Tuple anchorTuple = mock(TupleImpl.class); when(harvester.harvestRecord(anyString(), anyString(), anyString(), any(XPathExpression.class), any(XPathExpression.class))).thenThrow(new HarvesterException("Some!")); StormTaskTuple task = taskWithAllNeededParameters(); StormTaskTuple spiedTask = spy(task); recordHarvestingBolt.execute(anchorTuple, spiedTask); verifyErrorEmit(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
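One detail visible in the execute(...) body above is that outputCollector.ack(anchorTuple) is called at the end of the method on both the success and the failure path, so an exception test such as shouldEmitErrorOnHarvestingException can also assert the ack. A small sketch, under the same mock-collector assumption as before:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.verify;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.tuple.Tuple;

class AckOnFailureSketch {

    OutputCollector outputCollector; // assumed mock injected into the bolt under test

    // execute(...) acks the anchor tuple unconditionally, so even when
    // harvestRecord throws, the anchor must have been acked exactly once.
    void verifyAnchorAcked() {
        verify(outputCollector).ack(any(Tuple.class));
    }
}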
@Test public void harvestingForEmptyUrl() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple task = taskWithoutResourceUrl(); recordHarvestingBolt.execute(anchorTuple, task); verifyErrorEmit(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void harvestingForEmptyRecordId() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple task = taskWithoutRecordId(); recordHarvestingBolt.execute(anchorTuple, task); verifyErrorEmit(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void harvestForEmptyPrefix() { Tuple anchorTuple = mock(TupleImpl.class); StormTaskTuple task = taskWithoutPrefix(); recordHarvestingBolt.execute(anchorTuple, task); verifyErrorEmit(); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
RecordHarvestingBolt extends AbstractDpsBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { long harvestingStartTime = new Date().getTime(); LOGGER.info("Starting harvesting for: {}", stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); String endpointLocation = readEndpointLocation(stormTaskTuple); String recordId = readRecordId(stormTaskTuple); String metadataPrefix = readMetadataPrefix(stormTaskTuple); if (parametersAreValid(endpointLocation, recordId, metadataPrefix)) { LOGGER.info("OAI Harvesting started for: {} and {}", recordId, endpointLocation); try (final InputStream record = harvester.harvestRecord(endpointLocation, recordId, metadataPrefix, expr, isDeletedExpression)) { stormTaskTuple.setFileData(record); if (useHeaderIdentifier(stormTaskTuple)) trimLocalId(stormTaskTuple); else useEuropeanaId(stormTaskTuple); outputCollector.emit(anchorTuple, stormTaskTuple.toStormTuple()); LOGGER.info("Harvesting finished successfully for: {} and {}", recordId, endpointLocation); } catch (HarvesterException | IOException | EuropeanaIdException e) { LOGGER.error("Exception on harvesting", e); emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), "Error while harvesting a record", "The full error is: " + e.getMessage() + ". The cause of the error is: " + e.getCause(), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); LOGGER.error(e.getMessage()); } } else { emitErrorNotification( anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getParameter(DPS_TASK_INPUT_DATA), "Invalid parameters", null, StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } LOGGER.info("Harvesting finished in: {}ms for {}", Calendar.getInstance().getTimeInMillis() - harvestingStartTime, stormTaskTuple.getParameter(CLOUD_LOCAL_IDENTIFIER)); outputCollector.ack(anchorTuple); } @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldEmitNotificationWhenDataSetListHasOneElement() throws MCSException, IOException { when(fileServiceClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION_HEADER))).thenReturn(null); verifyMethodExecutionNumber(1, 0, FILE_URL); }
private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple t); }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple t); }
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingMCSException() throws MCSException, IOException { doThrow(MCSException.class).when(fileServiceClient).getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION_HEADER)); verifyMethodExecutionNumber(4, 1, FILE_URL); }
private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple t); }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple t); }
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingDriverException() throws MCSException, IOException { doThrow(DriverException.class).when(fileServiceClient).getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION_HEADER)); verifyMethodExecutionNumber(4, 1, FILE_URL); }
private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple t); }
ReadFileBolt extends AbstractDpsBolt { private InputStream getFile(FileServiceClient fileClient, String file, String authorization) throws MCSException, IOException { int retries = DEFAULT_RETRIES; while (true) { try { return fileClient.getFile(file, AUTHORIZATION, authorization); } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while getting a file. Retries left:{} ", retries); waitForSpecificTime(); } else { LOGGER.error("Error while getting a file."); throw e; } } } } ReadFileBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple t); }
@Test public void successfulExecuteStormTupleWithExistedCloudId() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); CloudId cloudId = mock(CloudId.class); when(cloudId.getId()).thenReturn(SOURCE + CLOUD_ID); when(uisClient.getCloudId(SOURCE + DATA_PROVIDER, SOURCE + LOCAL_ID, AUTHORIZATION, AUTHORIZATION_HEADER)).thenReturn(cloudId); URI uri = new URI(SOURCE_VERSION_URL); when(recordServiceClient.createRepresentation(anyString(), anyString(), anyString(), any(InputStream.class), anyString(), anyString(), anyString(), anyString())).thenReturn(uri); oaiWriteRecordBoltT.execute(anchorTuple, getStormTaskTuple()); assertExecutionResults(); }
private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); @Override void prepare(); }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); @Override void prepare(); static final String ERROR_MSG_WHILE_CREATING_CLOUD_ID; static final String ERROR_MSG_WHILE_MAPPING_LOCAL_CLOUD_ID; static final String ERROR_MSG_WHILE_GETTING_CLOUD_ID; static final String ERROR_MSG_RETRIES; }
@Test public void shouldExecuteTheRemovalOnListOfTASKS() throws IOException { removerInvoker.executeInvokerForListOfTasks("src/test/resources/taskIds.csv", true); verify(remover, times(6)).removeNotifications(anyLong()); verify(remover, times(6)).removeStatistics(anyLong()); verify(remover, times(6)).removeErrorReports(anyLong()); }
public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldRemoveErrors); } }
RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldRemoveErrors); } } }
RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldRemoveErrors); } } RemoverInvoker(Remover remover); }
RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldRemoveErrors); } } RemoverInvoker(Remover remover); void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors); void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors); }
RemoverInvoker { public void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors) throws IOException { TaskIdsReader reader = new CommaSeparatorReaderImpl(); List<String> taskIds = reader.getTaskIds(filePath); for (String taskId : taskIds) { executeInvokerForSingleTask(Long.valueOf(taskId), shouldRemoveErrors); } } RemoverInvoker(Remover remover); void executeInvokerForSingleTask(long taskId, boolean shouldRemoveErrors); void executeInvokerForListOfTasks(String filePath, boolean shouldRemoveErrors); }
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingMCSException() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); CloudId cloudId = mock(CloudId.class); when(cloudId.getId()).thenReturn(SOURCE + CLOUD_ID); when(uisClient.getCloudId(SOURCE + DATA_PROVIDER, SOURCE + LOCAL_ID, AUTHORIZATION, AUTHORIZATION_HEADER)).thenReturn(cloudId); doThrow(MCSException.class).when(recordServiceClient).createRepresentation(anyString(), anyString(), anyString(), any(InputStream.class), anyString(), anyString(), anyString(), anyString()); oaiWriteRecordBoltT.execute(anchorTuple, getStormTaskTuple()); assertFailingExpectationWhenCreatingRepresentation(); }
private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); @Override void prepare(); }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); @Override void prepare(); static final String ERROR_MSG_WHILE_CREATING_CLOUD_ID; static final String ERROR_MSG_WHILE_MAPPING_LOCAL_CLOUD_ID; static final String ERROR_MSG_WHILE_GETTING_CLOUD_ID; static final String ERROR_MSG_RETRIES; }
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingDriverException() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); CloudId cloudId = mock(CloudId.class); when(cloudId.getId()).thenReturn(SOURCE + CLOUD_ID); when(uisClient.getCloudId(SOURCE + DATA_PROVIDER, SOURCE + LOCAL_ID, AUTHORIZATION, AUTHORIZATION_HEADER)).thenReturn(cloudId); doThrow(DriverException.class).when(recordServiceClient).createRepresentation(anyString(), anyString(), anyString(), any(InputStream.class), anyString(), anyString(), anyString(), anyString()); oaiWriteRecordBoltT.execute(anchorTuple, getStormTaskTuple()); assertFailingExpectationWhenCreatingRepresentation(); }
private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); @Override void prepare(); }
HarvestingWriteRecordBolt extends WriteRecordBolt { private String getCloudId(String authorizationHeader, String providerId, String localId, String additionalLocalIdentifier) throws CloudException { String result; CloudId cloudId; cloudId = getCloudId(providerId, localId, authorizationHeader); if (cloudId != null) { result = cloudId.getId(); } else { result = createCloudId(providerId, localId, authorizationHeader); } if (additionalLocalIdentifier != null) attachAdditionalLocalIdentifier(additionalLocalIdentifier, result, providerId, authorizationHeader); return result; } HarvestingWriteRecordBolt(String ecloudMcsAddress, String ecloudUisAddress); @Override void prepare(); static final String ERROR_MSG_WHILE_CREATING_CLOUD_ID; static final String ERROR_MSG_WHILE_MAPPING_LOCAL_CLOUD_ID; static final String ERROR_MSG_WHILE_GETTING_CLOUD_ID; static final String ERROR_MSG_RETRIES; }
@Test public void shouldParseFileAndEmitResources() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); when(taskStatusChecker.hasKillFlag(eq(TASK_ID))).thenReturn(false); parseFileBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, Mockito.times(5)).emit(captor.capture()); List<Values> capturedValuesList = captor.getAllValues(); assertEquals(4, capturedValuesList.size()); for (Values values : capturedValuesList) { assertEquals(8, values.size()); Map<String, String> val = (Map) values.get(4); assertNotNull(val); for (String parameterKey : val.keySet()) { assertTrue(expectedParametersKeysList.contains(parameterKey)); } } } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldDropTaskAndStopEmitting() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/Item_35834473.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); when(taskStatusChecker.hasKillFlag(eq(TASK_ID))).thenReturn(false).thenReturn(false).thenReturn(true); parseFileBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, Mockito.times(2)).emit(captor.capture()); } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldParseFileWithEmptyResourcesAndForwardOneTuple() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/no-resources.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); parseFileBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, Mockito.times(1)).emit(captor.capture()); Values values = captor.getValue(); assertNotNull(values); System.out.println(values); Map<String, String> map = (Map) values.get(4); System.out.println(map); assertEquals(2, map.size()); assertNull(map.get(PluginParameterKeys.RESOURCE_LINKS_COUNT)); assertNull(map.get(PluginParameterKeys.RESOURCE_LINK_KEY)); } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
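The no-links branch of execute() above forwards a deep clone of the incoming tuple rather than the tuple itself. A minimal, self-contained sketch of why that matters follows; it uses the same com.rits.cloning.Cloner API the bolt uses, while the Record class and the "state" parameter are illustrative stand-ins for StormTaskTuple and its parameter map, not project code.

```java
// Small sketch of why execute() deep-clones the tuple before re-emitting it in the
// no-links branch: the copy can be mutated downstream without touching the original.
// Record and its "state" entry are illustrative, not the project's actual classes.
import com.rits.cloning.Cloner;
import java.util.HashMap;
import java.util.Map;

public class DeepCloneSketch {

    static class Record {
        Map<String, String> parameters = new HashMap<>();
    }

    public static void main(String[] args) {
        Record original = new Record();
        original.parameters.put("state", "QUEUED");

        Record forwarded = new Cloner().deepClone(original);
        forwarded.parameters.put("state", "PROCESSED");

        // The original record is untouched by changes made to the forwarded copy.
        System.out.println(original.parameters.get("state"));  // QUEUED
        System.out.println(forwarded.parameters.get("state")); // PROCESSED
    }
}
```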
@Test public void shouldEmitErrorWhenDownloadFileFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); doThrow(IOException.class).when(fileClient).getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION)); parseFileBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, Mockito.times(1)).emit(eq(NOTIFICATION_STREAM_NAME), any(Tuple.class), captor.capture()); Values values = captor.getValue(); assertNotNull(values); Map<String, String> valueMap = (Map) values.get(2); assertNotNull(valueMap); assertEquals(4, valueMap.size()); assertTrue(valueMap.get("additionalInfo").contains("Error while reading and parsing the EDM file")); assertEquals(RecordState.ERROR.toString(), valueMap.get("state")); assertNull(valueMap.get(PluginParameterKeys.RESOURCE_LINKS_COUNT)); verify(outputCollector, Mockito.times(0)).emit(anyList()); }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
@Test public void shouldEmitErrorWhenGettingResourceLinksFails() throws Exception { Tuple anchorTuple = mock(TupleImpl.class); try (InputStream stream = this.getClass().getResourceAsStream("/files/broken.xml")) { when(fileClient.getFile(eq(FILE_URL), eq(AUTHORIZATION), eq(AUTHORIZATION))).thenReturn(stream); parseFileBolt.execute(anchorTuple, stormTaskTuple); verify(outputCollector, Mockito.times(1)).emit(eq(NOTIFICATION_STREAM_NAME), any(Tuple.class), captor.capture()); Values values = captor.getValue(); assertNotNull(values); Map<String, String> valueMap = (Map) values.get(2); assertNotNull(valueMap); assertEquals(4, valueMap.size()); assertTrue(valueMap.get("additionalInfo").contains("Error while reading and parsing the EDM file")); assertEquals(RecordState.ERROR.toString(), valueMap.get("state")); assertNull(valueMap.get(PluginParameterKeys.RESOURCE_LINKS_COUNT)); verify(outputCollector, Mockito.times(0)).emit(anyList()); } }
@Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
ParseFileBolt extends ReadFileBolt { @Override public void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple) { try (InputStream stream = getFileStreamByStormTuple(stormTaskTuple)) { byte[] fileContent = IOUtils.toByteArray(stream); List<RdfResourceEntry> rdfResourceEntries = getResourcesFromRDF(fileContent); int linksCount = getLinksCount(stormTaskTuple, rdfResourceEntries.size()); if (linksCount == 0) { StormTaskTuple tuple = new Cloner().deepClone(stormTaskTuple); LOGGER.info("The EDM file has no resource Links "); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } else { for (RdfResourceEntry rdfResourceEntry : rdfResourceEntries) { if (AbstractDpsBolt.taskStatusChecker.hasKillFlag(stormTaskTuple.getTaskId())) break; StormTaskTuple tuple = createStormTuple(stormTaskTuple, rdfResourceEntry, linksCount); outputCollector.emit(anchorTuple, tuple.toStormTuple()); } } } catch (Exception e) { LOGGER.error("Unable to read and parse file ", e); emitErrorNotification(anchorTuple, stormTaskTuple.getTaskId(), stormTaskTuple.getFileUrl(), e.getMessage(), "Error while reading and parsing the EDM file. The full error is: " + ExceptionUtils.getStackTrace(e), StormTaskTupleHelper.getRecordProcessingStartTime(stormTaskTuple)); } outputCollector.ack(anchorTuple); } ParseFileBolt(String ecloudMcsAddress); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); @Override void prepare(); }
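Taken together, the ParseFileBolt tests above pin down a simple fan-out rule: a record with no resource links is forwarded once unchanged, a record with links produces one tuple per link, and a kill flag stops the loop mid-record. The plain-Java sketch below reproduces only that rule; the LongPredicate and Consumer are stand-ins for TaskStatusChecker and Storm's OutputCollector, chosen as assumptions to keep the example runnable on its own.

```java
// Plain-Java sketch of the fan-out rule exercised by the ParseFileBolt tests:
// no links -> forward the record once; links -> one emit per link until the kill flag trips.
import java.util.List;
import java.util.function.Consumer;
import java.util.function.LongPredicate;

public class ParseFanOutSketch {

    static int emitForResources(long taskId, List<String> resourceLinks,
                                LongPredicate killFlag, Consumer<String> emit) {
        int emitted = 0;
        if (resourceLinks.isEmpty()) {
            emit.accept("original-record");   // an EDM file without links is forwarded unchanged
            emitted++;
        } else {
            for (String link : resourceLinks) {
                if (killFlag.test(taskId)) {
                    break;                    // dropped task: stop emitting mid-record
                }
                emit.accept(link);
                emitted++;
            }
        }
        return emitted;
    }

    public static void main(String[] args) {
        // No links -> exactly one forwarded tuple, as in shouldParseFileWithEmptyResourcesAndForwardOneTuple.
        System.out.println(emitForResources(1L, List.of(), id -> false, s -> {}));  // 1
        // Three links, kill flag trips on the third check -> two tuples, as in shouldDropTaskAndStopEmitting.
        int[] checks = {0};
        System.out.println(emitForResources(1L, List.of("a", "b", "c"),
                id -> ++checks[0] >= 3, s -> {}));                                   // 2
    }
}
```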
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingMCSException() throws MCSException { stormTaskTuple = prepareTupleWithSingleDataSet(); doThrow(MCSException.class).when(dataSetServiceClient).assignRepresentationToDataSet(anyString(), anyString(), anyString(), anyString(), anyString(),eq(AUTHORIZATION),eq(AUTHORIZATION)); verifyMethodExecutionNumber(4, 1); }
private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } AddResultToDataSetBolt(String ecloudMcsAddress); }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } AddResultToDataSetBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } AddResultToDataSetBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
@Test public void shouldRetry3TimesBeforeFailingWhenThrowingDriverException() throws MCSException { stormTaskTuple = prepareTupleWithSingleDataSet(); doThrow(DriverException.class).when(dataSetServiceClient).assignRepresentationToDataSet(anyString(), anyString(), anyString(), anyString(), anyString(),eq(AUTHORIZATION),eq(AUTHORIZATION)); verifyMethodExecutionNumber(4, 1); }
private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } AddResultToDataSetBolt(String ecloudMcsAddress); }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } AddResultToDataSetBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
AddResultToDataSetBolt extends AbstractDpsBolt { private void assignRepresentationToDataSet(DataSet dataSet, Representation resultRepresentation, String authorizationHeader) throws MCSException { int retries = DEFAULT_RETRIES; while (true) { try { dataSetServiceClient.assignRepresentationToDataSet( dataSet.getProviderId(), dataSet.getId(), resultRepresentation.getCloudId(), resultRepresentation.getRepresentationName(), resultRepresentation.getVersion(), AUTHORIZATION, authorizationHeader); break; } catch (Exception e) { if (retries-- > 0) { LOGGER.warn("Error while assigning record to dataset. Retries left: {}", retries); waitForSpecificTime(); } else { LOGGER.error("Error while assigning record to dataset."); throw e; } } } } AddResultToDataSetBolt(String ecloudMcsAddress); @Override void prepare(); @Override void execute(Tuple anchorTuple, StormTaskTuple stormTaskTuple); }
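Both retry tests above expect assignRepresentationToDataSet to be invoked four times before the exception escapes, i.e. one initial attempt plus what is presumably DEFAULT_RETRIES = 3 retries (the constant's value is not visible here, so that figure is an assumption). The standalone helper below shows the same retry-then-rethrow loop in isolation; RetrySketch, runWithRetry and the sleep duration are hypothetical names, not part of the project.

```java
// Minimal sketch of the retry-then-rethrow pattern used by assignRepresentationToDataSet.
// DEFAULT_RETRIES and the sleep duration below are assumed values for illustration only.
import java.util.concurrent.Callable;

public final class RetrySketch {

    private static final int DEFAULT_RETRIES = 3;              // assumed value
    private static final long SLEEP_BETWEEN_RETRIES_MS = 100;  // assumed value

    /** Runs the action, retrying DEFAULT_RETRIES times before rethrowing the last failure. */
    public static <T> T runWithRetry(Callable<T> action) throws Exception {
        int retries = DEFAULT_RETRIES;
        while (true) {
            try {
                return action.call();
            } catch (Exception e) {
                if (retries-- > 0) {
                    Thread.sleep(SLEEP_BETWEEN_RETRIES_MS);
                } else {
                    throw e;  // rethrown after 1 initial attempt + DEFAULT_RETRIES retries
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Succeeds on the third attempt; the helper absorbs the first two failures.
        int[] calls = {0};
        String result = runWithRetry(() -> {
            if (++calls[0] < 3) {
                throw new IllegalStateException("transient failure");
            }
            return "ok after " + calls[0] + " calls";
        });
        System.out.println(result);
    }
}
```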
@Test public void testAddingToQueueSuccessfully() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date()); assertEquals(0, queueFiller.tuplesWithFileUrls.size()); for (int i = 0; i < 10; i++) queueFiller.addTupleToQueue(new StormTaskTuple(), new FileServiceClient(BASE_URL), representation); assertEquals(10, queueFiller.tuplesWithFileUrls.size()); }
public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation); }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation); }
@Test public void shouldCreateTheCorrectFilePath() throws Exception { String filePath = FileUtil.createFilePath(FOLDER_PATH, FILE_NAME_WITHOUT_EXTENSION, EXTENSION); assertEquals(filePath, FILE_PATH); filePath = FileUtil.createFilePath(FOLDER_PATH, FILE_NAME_WITH_EXTENSION, EXTENSION); assertEquals(filePath, FILE_PATH); }
public static String createFilePath(String folderPath, String fileName, String extension) { String filePath = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePath = filePath + extension; return filePath; }
FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePath = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePath = filePath + extension; return filePath; } }
FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePath = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePath = filePath + extension; return filePath; } }
FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePath = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePath = filePath + extension; return filePath; } static void persistStreamToFile(InputStream inputStream, String folderPath, String fileName, String extension); static String createFilePath(String folderPath, String fileName, String extension); static String createFolder(); static String createZipFolderPath(Date date); }
FileUtil { public static String createFilePath(String folderPath, String fileName, String extension) { String filePath = folderPath + fileName; if ("".equals(FilenameUtils.getExtension(fileName))) filePath = filePath + extension; return filePath; } static void persistStreamToFile(InputStream inputStream, String folderPath, String fileName, String extension); static String createFilePath(String folderPath, String fileName, String extension); static String createFolder(); static String createZipFolderPath(Date date); }
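shouldCreateTheCorrectFilePath asserts that a bare file name and one that already carries the extension resolve to the same path. The snippet below restates that rule with concrete illustrative values ("/tmp/export/", "record1" and ".xml" are not the test's actual constants) so the extension check can be run in isolation; it relies on the same commons-io FilenameUtils call as the production method.

```java
// Standalone illustration of the extension-handling rule checked by shouldCreateTheCorrectFilePath.
import org.apache.commons.io.FilenameUtils;

public class FilePathSketch {

    static String createFilePath(String folderPath, String fileName, String extension) {
        String filePath = folderPath + fileName;
        // Append the extension only when the supplied name does not already carry one.
        if ("".equals(FilenameUtils.getExtension(fileName))) {
            filePath = filePath + extension;
        }
        return filePath;
    }

    public static void main(String[] args) {
        System.out.println(createFilePath("/tmp/export/", "record1", ".xml"));      // /tmp/export/record1.xml
        System.out.println(createFilePath("/tmp/export/", "record1.xml", ".xml"));  // /tmp/export/record1.xml
    }
}
```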
@Test public void testKillingTheTaskEffectOnQueue() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false, false, false, true); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date()); assertEquals(0, queueFiller.tuplesWithFileUrls.size()); for (int i = 0; i < 10; i++) queueFiller.addTupleToQueue(new StormTaskTuple(), new FileServiceClient(BASE_URL), representation); assertEquals(3, queueFiller.tuplesWithFileUrls.size()); }
public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation); }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation); }
@Test public void shouldEmitErrorsInCaseOfExceptionWhileGettingTheFiles() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date()); FileServiceClient fileServiceClient = Mockito.mock(FileServiceClient.class); doThrow(MCSException.class).when(fileServiceClient).getFile(anyString(),anyString(),anyString(),anyString()); assertEquals(0, queueFiller.tuplesWithFileUrls.size()); for (int i = 0; i < 10; i++) queueFiller.addTupleToQueue(new StormTaskTuple(), fileServiceClient, representation); assertEquals(0, queueFiller.tuplesWithFileUrls.size()); verify(collector, times(10)).emit(Matchers.eq(AbstractDpsBolt.NOTIFICATION_STREAM_NAME), anyListOf(Object.class)); }
public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation); }
QueueFiller { public int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation) { int count = 0; final long taskId = stormTaskTuple.getTaskId(); if (representation != null) { for (eu.europeana.cloud.common.model.File file : representation.getFiles()) { String fileUrl = ""; if (!taskStatusChecker.hasKillFlag(taskId)) { try { fileUrl = fileServiceClient.getFileUri(representation.getCloudId(), representation.getRepresentationName(), representation.getVersion(), file.getFileName()).toString(); final StormTaskTuple fileTuple = buildNextStormTuple(stormTaskTuple, fileUrl); tuplesWithFileUrls.put(fileTuple); count++; } catch (Exception e) { LOGGER.warn("Error while getting File URI from MCS {}", e.getMessage()); count++; emitErrorNotification(taskId, fileUrl, "Error while getting File URI from MCS " + e.getMessage(), ""); } } else break; } } else { LOGGER.warn("Problem while reading representation"); } return count; } QueueFiller(TaskStatusChecker taskStatusChecker, SpoutOutputCollector collector, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls); int addTupleToQueue(StormTaskTuple stormTaskTuple, FileServiceClient fileServiceClient, Representation representation); }
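addTupleToQueue hands each resolved file URL to an ArrayBlockingQueue via put(...), which blocks when the queue is full and so naturally throttles the producing thread. A minimal JDK-only sketch (hypothetical names) of that bounded-queue behaviour:

// Self-contained sketch: put(...) blocks on a full queue, a timed offer(...) gives up instead.
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.TimeUnit;

class BoundedQueueSketch {
    public static void main(String[] args) throws InterruptedException {
        ArrayBlockingQueue<String> queue = new ArrayBlockingQueue<>(2);
        queue.put("file-1");
        queue.put("file-2");
        // offer(...) with a timeout returns false instead of blocking forever while the queue stays full.
        boolean accepted = queue.offer("file-3", 100, TimeUnit.MILLISECONDS);
        System.out.println("third element accepted: " + accepted); // prints false
    }
}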
@Test public void shouldEmitTheFilesWhenNoRevisionIsSpecified() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask = prepareDpsTask(dataSets, prepareStormTaskTupleParameters()); Representation representation = testHelper.prepareRepresentationWithMultipleFiles(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date(), 2); when(dataSetServiceClient.getRepresentationIterator(eq("testDataProvider"), eq("dataSet"))).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(true, false); when(representationIterator.next()).thenReturn(representation); when(fileServiceClient.getFileUri(eq(SOURCE + CLOUD_ID), eq(SOURCE + REPRESENTATION_NAME), eq(SOURCE + VERSION), eq("fileName"))).thenReturn(new URI(FILE_URL)).thenReturn(new URI(FILE_URL)); ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls = new ArrayBlockingQueue<>(QUEUE_MAX_SIZE); TaskExecutor taskExecutor = new TaskExecutor(collector, taskStatusChecker, cassandraTaskInfoDAO, tuplesWithFileUrls, anyString(), DATASET_URLS.name(), dpsTask); taskExecutor.call(); assertEquals(tuplesWithFileUrls.size(), 2); }
@Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
@Test public void shouldFailWhenReadFileThrowMCSExceptionWhenNoRevisionIsSpecified() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask = prepareDpsTask(dataSets, prepareStormTaskTupleParameters()); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date()); when(dataSetServiceClient.getRepresentationIterator(eq("testDataProvider"), eq("dataSet"))).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(true, false); when(representationIterator.next()).thenReturn(representation); doThrow(MCSException.class).when(fileServiceClient).getFileUri(eq(SOURCE + CLOUD_ID), eq(SOURCE + REPRESENTATION_NAME), eq(SOURCE + VERSION), eq("fileName")); ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls = new ArrayBlockingQueue<>(QUEUE_MAX_SIZE); TaskExecutor taskExecutor = new TaskExecutor(collector, taskStatusChecker, cassandraTaskInfoDAO, tuplesWithFileUrls, anyString(), DATASET_URLS.name(), dpsTask); taskExecutor.call(); verify(collector, times(0)).emit(anyListOf(Object.class)); verify(fileServiceClient, times(1)).getFileUri(eq(SOURCE + CLOUD_ID), eq(SOURCE + REPRESENTATION_NAME), eq(SOURCE + VERSION), eq("fileName")); verify(collector, times(1)).emit(eq(AbstractDpsBolt.NOTIFICATION_STREAM_NAME), anyListOf(Object.class)); }
@Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
@Test public void shouldFailPerEachFileWhenReadFileThrowDriverExceptionWhenNoRevisionIsSpecified() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask = prepareDpsTask(dataSets, prepareStormTaskTupleParameters()); Representation representation = testHelper.prepareRepresentation(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date()); when(dataSetServiceClient.getRepresentationIterator(eq("testDataProvider"), eq("dataSet"))).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(true, false); when(representationIterator.next()).thenReturn(representation); doThrow(DriverException.class).when(fileServiceClient).getFileUri(eq(SOURCE + CLOUD_ID), eq(SOURCE + REPRESENTATION_NAME), eq(SOURCE + VERSION), eq("fileName")); ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls = new ArrayBlockingQueue<>(QUEUE_MAX_SIZE); TaskExecutor taskExecutor = new TaskExecutor(collector, taskStatusChecker, cassandraTaskInfoDAO, tuplesWithFileUrls, anyString(), DATASET_URLS.name(), dpsTask); taskExecutor.call(); verify(collector, times(0)).emit(anyListOf(Object.class)); verify(fileServiceClient, times(1)).getFileUri(eq(SOURCE + CLOUD_ID), eq(SOURCE + REPRESENTATION_NAME), eq(SOURCE + VERSION), eq("fileName")); verify(collector, times(1)).emit(eq(AbstractDpsBolt.NOTIFICATION_STREAM_NAME), anyListOf(Object.class)); }
@Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
@Test public void shouldStopEmittingFilesWhenTaskIsKilled() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(false, false, true); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask = prepareDpsTask(dataSets, prepareStormTaskTupleParameters()); Representation representation = testHelper.prepareRepresentationWithMultipleFiles(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date(), 2); when(dataSetServiceClient.getRepresentationIterator(eq("testDataProvider"), eq("dataSet"))).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(true, false); when(representationIterator.next()).thenReturn(representation); when(fileServiceClient.getFileUri(eq(SOURCE + CLOUD_ID), eq(SOURCE + REPRESENTATION_NAME), eq(SOURCE + VERSION), eq("fileName"))).thenReturn(new URI(FILE_URL)).thenReturn(new URI(FILE_URL)); ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls = new ArrayBlockingQueue<>(QUEUE_MAX_SIZE); TaskExecutor taskExecutor = new TaskExecutor(collector, taskStatusChecker, cassandraTaskInfoDAO, tuplesWithFileUrls, anyString(), DATASET_URLS.name(), dpsTask); taskExecutor.call(); assertEquals(tuplesWithFileUrls.size(), 1); }
@Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
@Test public void shouldNotEmitAnyFilesWhenTaskIsKilledBeforeIteratingRepresentation() throws Exception { when(taskStatusChecker.hasKillFlag(anyLong())).thenReturn(true); when(collector.emit(anyListOf(Object.class))).thenReturn(null); List<String> dataSets = new ArrayList<>(); dataSets.add(DATASET_URL); DpsTask dpsTask = prepareDpsTask(dataSets, prepareStormTaskTupleParameters()); Representation representation = testHelper.prepareRepresentationWithMultipleFiles(SOURCE + CLOUD_ID, SOURCE + REPRESENTATION_NAME, SOURCE + VERSION, SOURCE_VERSION_URL, DATA_PROVIDER, false, new Date(), 2); when(dataSetServiceClient.getRepresentationIterator(eq("testDataProvider"), eq("dataSet"))).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(true, false); when(representationIterator.next()).thenReturn(representation); when(fileServiceClient.getFileUri(eq(SOURCE + CLOUD_ID), eq(SOURCE + REPRESENTATION_NAME), eq(SOURCE + VERSION), eq("fileName"))).thenReturn(new URI(FILE_URL)).thenReturn(new URI(FILE_URL)); ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls = new ArrayBlockingQueue<>(QUEUE_MAX_SIZE); TaskExecutor taskExecutor = new TaskExecutor(collector, taskStatusChecker, cassandraTaskInfoDAO, tuplesWithFileUrls, anyString(), DATASET_URLS.name(), dpsTask); taskExecutor.call(); assertEquals(tuplesWithFileUrls.size(), 0); }
@Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
TaskExecutor implements Callable<Void> { @Override public Void call() { try { execute(); } catch (Exception e) { taskStatusUpdater.setTaskDropped(dpsTask.getTaskId(), "The task was dropped because of " + e.getMessage() + ". The full exception is" + Throwables.getStackTraceAsString(e)); } return null; } TaskExecutor(SpoutOutputCollector collector, TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, ArrayBlockingQueue<StormTaskTuple> tuplesWithFileUrls, String mcsClientURL, String stream, DpsTask dpsTask); @Override Void call(); }
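TaskExecutor is a Callable<Void> whose call() never rethrows: failures are recorded by dropping the task, so callers only learn about completion, not success. A minimal sketch (hypothetical pool and timeout) of how such a callable is typically submitted and awaited:

// Hypothetical usage sketch: submitting a Callable<Void> to a worker pool and waiting for it.
import java.util.concurrent.*;

class CallableSubmissionSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Callable<Void> task = () -> {
            // work that may throw; a real TaskExecutor catches and records the failure itself
            return null;
        };
        Future<Void> future = pool.submit(task);
        future.get(30, TimeUnit.SECONDS); // call() never rethrows, so get() only signals completion
        pool.shutdown();
    }
}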
@Test public void testLoadingDefaultPropertiesFile() throws FileNotFoundException, IOException { reader.loadDefaultPropertyFile(DEFAULT_PROPERTIES_FILE, topologyProperties); assertNotNull(topologyProperties); assertFalse(topologyProperties.isEmpty()); for (final Map.Entry<Object, Object> e : topologyProperties.entrySet()) { assertNotNull(e.getKey()); } }
public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
@Test public void testLoadingProvidedPropertiesFile() throws FileNotFoundException, IOException { reader.loadProvidedPropertyFile(PROVIDED_PROPERTIES_FILE, topologyProperties); assertNotNull(topologyProperties); assertFalse(topologyProperties.isEmpty()); for (final Map.Entry<Object, Object> e : topologyProperties.entrySet()) { assertNotNull(e.getKey()); } }
public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
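The two loaders above differ only in where the bytes come from: defaults are read as a classpath resource, overrides from an ordinary file path. A self-contained sketch of both paths (placeholder file names); unlike the originals it closes both streams with try-with-resources:

// Sketch of the two loading paths: classpath defaults first, then filesystem overrides.
import java.io.*;
import java.util.Properties;

class PropertiesLoadingSketch {
    static Properties load(String classpathDefaults, String providedPath) throws IOException {
        Properties props = new Properties();
        try (InputStream in = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream(classpathDefaults)) {
            if (in == null) {
                throw new FileNotFoundException(classpathDefaults); // same contract as loadDefaultPropertyFile
            }
            props.load(in);
        }
        try (FileInputStream in = new FileInputStream(providedPath)) {
            props.load(in); // values loaded here overwrite defaults that share the same keys
        }
        return props;
    }
}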
@Test(expected = FileNotFoundException.class) public void testLoadingNonExistedDefaultFile() throws FileNotFoundException, IOException { reader.loadDefaultPropertyFile("NON_EXISTED_FILE", topologyProperties); }
public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
PropertyFileLoader { public void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties) throws IOException { InputStream propertiesInputStream = Thread.currentThread() .getContextClassLoader().getResourceAsStream(defaultPropertyFile); if (propertiesInputStream == null) throw new FileNotFoundException(); topologyProperties.load(propertiesInputStream); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
@Test public void testCreateZipFolderPath() { Date date = new Date(); DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-ssss"); String expectedFolderName = ECLOUD_SUFFIX + "-" + dateFormat.format(date); String folderPath = FileUtil.createZipFolderPath(date); String extension = FilenameUtils.getExtension(folderPath); String folderName = FilenameUtils.getBaseName(folderPath); assertEquals(extension, ZIP_EXTENSION); assertEquals(folderName, expectedFolderName); }
public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; }
FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } }
FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } }
FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } static void persistStreamToFile(InputStream inputStream, String folderPath, String fileName, String extension); static String createFilePath(String folderPath, String fileName, String extension); static String createFolder(); static String createZipFolderPath(Date date); }
FileUtil { public static String createZipFolderPath(Date date) { String folderName = generateFolderName(date); return System.getProperty("user.dir") + "/" + folderName + ZIP_FORMAT_EXTENSION; } static void persistStreamToFile(InputStream inputStream, String folderPath, String fileName, String extension); static String createFilePath(String folderPath, String fileName, String extension); static String createFolder(); static String createZipFolderPath(Date date); }
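A small sketch of the path construction the test verifies; the "eCloud" prefix stands in for ECLOUD_SUFFIX and is an assumption, and the date pattern is copied from the test. Note that lowercase "ssss" in SimpleDateFormat pads seconds to four digits (milliseconds would be "SSSS"), so the assertion only holds while FileUtil uses the identical pattern:

// Hedged sketch of a timestamped zip path; prefix and extension are assumptions from the test.
import java.text.SimpleDateFormat;
import java.util.Date;

class ZipPathSketch {
    static String zipPath(Date date) {
        String folderName = "eCloud-" + new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-ssss").format(date);
        return System.getProperty("user.dir") + "/" + folderName + ".zip";
    }

    public static void main(String[] args) {
        System.out.println(zipPath(new Date())); // e.g. /home/user/eCloud-2020-01-01-12-00-00-0000.zip
    }
}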
@Test(expected = FileNotFoundException.class) public void testLoadingNonExistedProvidedFile() throws FileNotFoundException, IOException { reader.loadProvidedPropertyFile("NON_EXISTED_FILE", topologyProperties); }
public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
PropertyFileLoader { public void loadProvidedPropertyFile(String fileName, Properties topologyProperties) throws IOException { File file = new File(fileName); FileInputStream fileInput = new FileInputStream(file); topologyProperties.load(fileInput); fileInput.close(); } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
@Test public void testLoadingFileWhenProvidedPropertyFileNotExisted() throws FileNotFoundException, IOException { PropertyFileLoader.loadPropertyFile(DEFAULT_PROPERTIES_FILE, "NON_EXISTED_PROVIDED_FILE", topologyProperties); assertNotNull(topologyProperties); assertFalse(topologyProperties.isEmpty()); for (final Map.Entry<Object, Object> e : topologyProperties.entrySet()) { assertNotNull(e.getKey()); } }
public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
@Test public void testLoadingFileWhenDefaultFileNotExists() throws FileNotFoundException, IOException { PropertyFileLoader.loadPropertyFile("NON_EXISTED_DEFAULT_FILE", PROVIDED_PROPERTIES_FILE, topologyProperties); assertTrue(topologyProperties.isEmpty()); }
public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
PropertyFileLoader { public static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties) { try { PropertyFileLoader reader = new PropertyFileLoader(); reader.loadDefaultPropertyFile(defaultPropertyFile, topologyProperties); if (!"".equals(providedPropertyFile)) { reader.loadProvidedPropertyFile(providedPropertyFile, topologyProperties); } } catch (IOException e) { LOGGER.error(Throwables.getStackTraceAsString(e)); } } static void loadPropertyFile(String defaultPropertyFile, String providedPropertyFile, Properties topologyProperties); void loadDefaultPropertyFile(String defaultPropertyFile, Properties topologyProperties); void loadProvidedPropertyFile(String fileName, Properties topologyProperties); }
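loadPropertyFile ties the two loaders together and, because the IOException is caught around both calls, a missing default file also prevents the provided file from being read, while a missing provided file still leaves the defaults in place. A short usage sketch of that behaviour (file names are placeholders and PropertyFileLoader is assumed to be on the classpath):

// Sketch of the fall-through behaviour the two tests above pin down.
import java.util.Properties;

class LoadOrderSketch {
    public static void main(String[] args) {
        Properties onlyDefaults = new Properties();
        PropertyFileLoader.loadPropertyFile("defaults.properties", "missing-override.properties", onlyDefaults);
        // onlyDefaults now holds the classpath defaults; the missing override was logged and ignored.

        Properties empty = new Properties();
        PropertyFileLoader.loadPropertyFile("missing-defaults.properties", "override.properties", empty);
        // empty.isEmpty() == true: the exception from the defaults aborts loading the override as well.
    }
}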
@Test public void getTaskStatisticsReport() { List<NodeStatistics> stats = prepareStats(); Mockito.when(cassandraNodeStatisticsDAO.getNodeStatistics(TASK_ID)).thenReturn(stats); Mockito.when(cassandraNodeStatisticsDAO.getStatisticsReport(TASK_ID)).thenReturn(null); StatisticsReport actual = cassandraStatisticsService.getTaskStatisticsReport(TASK_ID); Mockito.verify(cassandraNodeStatisticsDAO, Mockito.times(1)).storeStatisticsReport(eq(TASK_ID), Mockito.any(StatisticsReport.class)); Mockito.verify(cassandraNodeStatisticsDAO, Mockito.times(1)).getNodeStatistics(eq(TASK_ID)); assertEquals(TASK_ID, actual.getTaskId()); assertThat(actual.getNodeStatistics().size(), is(2)); assertEquals(stats, actual.getNodeStatistics()); }
@Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } @Override StatisticsReport getTaskStatisticsReport(long taskId); @Override List<NodeReport> getElementReport(long taskId, String elementPath); }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } @Override StatisticsReport getTaskStatisticsReport(long taskId); @Override List<NodeReport> getElementReport(long taskId, String elementPath); }
@Test public void getStoredTaskStatisticsReport() { StatisticsReport report = new StatisticsReport(TASK_ID, prepareStats()); Mockito.when(cassandraNodeStatisticsDAO.getStatisticsReport(TASK_ID)).thenReturn(report); StatisticsReport actual = cassandraStatisticsService.getTaskStatisticsReport(TASK_ID); Mockito.verify(cassandraNodeStatisticsDAO, Mockito.times(0)).storeStatisticsReport(eq(TASK_ID), Mockito.any(StatisticsReport.class)); Mockito.verify(cassandraNodeStatisticsDAO, Mockito.times(0)).getNodeStatistics(eq(TASK_ID)); assertEquals(TASK_ID, actual.getTaskId()); assertThat(actual.getNodeStatistics().size(), is(2)); assertEquals(report, actual); }
@Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } @Override StatisticsReport getTaskStatisticsReport(long taskId); @Override List<NodeReport> getElementReport(long taskId, String elementPath); }
CassandraValidationStatisticsService implements ValidationStatisticsReportService { @Override public StatisticsReport getTaskStatisticsReport(long taskId) { StatisticsReport report = cassandraNodeStatisticsDAO.getStatisticsReport(taskId); if (report == null) { List<NodeStatistics> nodeStatistics = cassandraNodeStatisticsDAO.getNodeStatistics(taskId); if (nodeStatistics == null || nodeStatistics.isEmpty()) { return null; } report = new StatisticsReport(taskId, nodeStatistics); cassandraNodeStatisticsDAO.storeStatisticsReport(taskId, report); } return report; } @Override StatisticsReport getTaskStatisticsReport(long taskId); @Override List<NodeReport> getElementReport(long taskId, String elementPath); }
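getTaskStatisticsReport is a read-through cache over the DAO: return the stored report if present, otherwise build it from node statistics, persist it, and return it. A generic JDK-only sketch of that pattern; the map and plain strings stand in for the DAO and the report, they are not the eCloud API:

// Lazy "compute once, then read the stored copy" pattern.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class ReportCacheSketch {
    private final Map<Long, String> storedReports = new ConcurrentHashMap<>();

    String reportFor(long taskId) {
        String report = storedReports.get(taskId);        // 1. try the persisted report
        if (report == null) {
            report = "report-built-from-node-statistics"; // 2. build it from the raw statistics
            storedReports.put(taskId, report);            // 3. persist so later calls skip step 2
        }
        return report;
    }
}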
@Test public void testExecutionWithMultipleTasks() throws Exception { when(taskInfoDAO.hasKillFlag(TASK_ID)).thenReturn(false, false, false, true, true); when(taskInfoDAO.hasKillFlag(TASK_ID2)).thenReturn(false, false, true); boolean task1killedFlag = false; boolean task2killedFlag = false; for (int i = 0; i < 8; i++) { if (i < 4) assertFalse(task1killedFlag); if (i < 3) assertFalse(task2killedFlag); task1killedFlag = taskStatusChecker.hasKillFlag(TASK_ID); if (i < 5) task2killedFlag = taskStatusChecker.hasKillFlag(TASK_ID2); Thread.sleep(6000); } verify(taskInfoDAO, times(8)).hasKillFlag(eq(TASK_ID)); verify(taskInfoDAO, times(5)).hasKillFlag(eq(TASK_ID2)); assertTrue(task1killedFlag); assertTrue(task2killedFlag); Thread.sleep(20000); verifyNoMoreInteractions(taskInfoDAO); }
public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } }
TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } }
TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } private TaskStatusChecker(CassandraConnectionProvider cassandraConnectionProvider); TaskStatusChecker(CassandraTaskInfoDAO taskDAO); }
TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } private TaskStatusChecker(CassandraConnectionProvider cassandraConnectionProvider); TaskStatusChecker(CassandraTaskInfoDAO taskDAO); static synchronized TaskStatusChecker getTaskStatusChecker(); static synchronized void init(CassandraConnectionProvider cassandraConnectionProvider); boolean hasKillFlag(long taskId); }
TaskStatusChecker { public boolean hasKillFlag(long taskId) { try { return cache.get(taskId); } catch (ExecutionException e) { LOGGER.info(e.getMessage()); return false; } } private TaskStatusChecker(CassandraConnectionProvider cassandraConnectionProvider); TaskStatusChecker(CassandraTaskInfoDAO taskDAO); static synchronized TaskStatusChecker getTaskStatusChecker(); static synchronized void init(CassandraConnectionProvider cassandraConnectionProvider); boolean hasKillFlag(long taskId); static final int CHECKING_INTERVAL_IN_SECONDS; static final int CONCURRENCY_LEVEL; static final int SIZE; }
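The test timing (a fresh DAO read expected on almost every 6-second poll) and the class constants suggest a small expiring LoadingCache in front of CassandraTaskInfoDAO.hasKillFlag. A hedged Guava-based sketch; the 5-second expiry, maximum size, and concurrency level are assumptions, and a LongPredicate stands in for the DAO:

// Hedged sketch of a kill-flag cache; all tuning values below are assumptions.
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.function.LongPredicate;

class KillFlagCacheSketch {
    private final LoadingCache<Long, Boolean> cache;

    KillFlagCacheSketch(LongPredicate killFlagReader) { // stands in for CassandraTaskInfoDAO.hasKillFlag
        cache = CacheBuilder.newBuilder()
                .expireAfterWrite(5, TimeUnit.SECONDS) // assumed CHECKING_INTERVAL_IN_SECONDS
                .maximumSize(100)                      // assumed SIZE
                .concurrencyLevel(1)                   // assumed CONCURRENCY_LEVEL
                .build(new CacheLoader<Long, Boolean>() {
                    @Override
                    public Boolean load(Long taskId) {
                        return killFlagReader.test(taskId); // re-read from storage after expiry
                    }
                });
    }

    boolean hasKillFlag(long taskId) {
        try {
            return cache.get(taskId);
        } catch (ExecutionException e) {
            return false; // mirrors the focal method: a lookup failure is treated as "not killed"
        }
    }
}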
@Test public void parameterIsProvidedTest() { stormTaskTuple.addParameter(PluginParameterKeys.MIME_TYPE, MIME_TYPE); assertTrue(TaskTupleUtility.isProvidedAsParameter(stormTaskTuple, PluginParameterKeys.MIME_TYPE)); }
protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
@Test public void parameterIsNotProvidedTest() { assertFalse(TaskTupleUtility.isProvidedAsParameter(stormTaskTuple, PluginParameterKeys.MIME_TYPE)); }
protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
TaskTupleUtility { protected static boolean isProvidedAsParameter(StormTaskTuple stormTaskTuple, String parameter) { if (stormTaskTuple.getParameter(parameter) != null) { return true; } else { return false; } } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
@Test public void getDefaultValueTest() { assertEquals(TaskTupleUtility.getParameterFromTuple(stormTaskTuple, PluginParameterKeys.MIME_TYPE), PluginParameterKeys.PLUGIN_PARAMETERS.get(PluginParameterKeys.MIME_TYPE)); }
public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
@Test public void getProvidedValueTest() { stormTaskTuple.addParameter(PluginParameterKeys.MIME_TYPE, MIME_TYPE); assertEquals(TaskTupleUtility.getParameterFromTuple(stormTaskTuple, PluginParameterKeys.MIME_TYPE), MIME_TYPE); }
public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
TaskTupleUtility { public static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter) { String outputValue = PluginParameterKeys.PLUGIN_PARAMETERS.get(parameter); if (isProvidedAsParameter(stormTaskTuple, parameter)) { outputValue = stormTaskTuple.getParameter(parameter); } return outputValue; } static String getParameterFromTuple(StormTaskTuple stormTaskTuple, String parameter); }
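The four tests above exercise a simple fallback rule: a value set on the tuple wins, otherwise the PLUGIN_PARAMETERS default is used. A self-contained JDK sketch of that rule, with plain maps standing in for the tuple parameters and the defaults:

// "Tuple value wins, otherwise fall back to the plugin default" lookup.
import java.util.HashMap;
import java.util.Map;

class ParameterFallbackSketch {
    public static void main(String[] args) {
        Map<String, String> defaults = new HashMap<>();
        defaults.put("MIME_TYPE", "text/plain");

        Map<String, String> tupleParameters = new HashMap<>();
        System.out.println(tupleParameters.getOrDefault("MIME_TYPE", defaults.get("MIME_TYPE"))); // text/plain
        tupleParameters.put("MIME_TYPE", "image/tiff");
        System.out.println(tupleParameters.getOrDefault("MIME_TYPE", defaults.get("MIME_TYPE"))); // image/tiff
    }
}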
@Test(expected = ZipException.class) public void shouldThrowZipExceptionWhileCompressEmptyFolder() throws Exception { folderPath = FileUtil.createFolder(); File folder = new File(folderPath); assertTrue(folder.isDirectory()); zipFolderPath = FileUtil.createZipFolderPath(new Date()); FolderCompressor.compress(folderPath, zipFolderPath); }
public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); }
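The expected-exception test above relies on ZipUtil.pack refusing to pack a directory that contains no files. A small standalone sketch of that behaviour, assuming the ZipException in the focal method is org.zeroturnaround.zip.ZipException from the zt-zip library; the temporary paths are purely illustrative:

    import java.io.File;
    import java.nio.file.Files;
    import org.zeroturnaround.zip.ZipException;
    import org.zeroturnaround.zip.ZipUtil;

    public class EmptyFolderPackDemo {
        public static void main(String[] args) throws Exception {
            File emptyDir = Files.createTempDirectory("empty").toFile();   // directory with no entries
            File zip = File.createTempFile("out", ".zip");
            try {
                ZipUtil.pack(emptyDir, zip);                               // expected to fail: nothing to pack
            } catch (ZipException e) {
                System.out.println("Empty directory rejected: " + e.getMessage());
            }
        }
    }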
@Test public void repeatOnError3Times_callNoThrowsExceptions_validResult() throws Exception { when(call.call()).thenReturn(RESULT); String result = Retriever.retryOnError3Times(ERROR_MESSAGE, call); assertEquals(RESULT, result); }
public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
@Test public void repeatOnError3Times_callNoThrowsExceptions_callInvokedOnce() throws Exception { when(call.call()).thenReturn(RESULT); String result = Retriever.retryOnError3Times(ERROR_MESSAGE, call); verify(call).call(); }
public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
@Test(expected = IOException.class) public void repeatOnError3Times_callAlwaysThrowsExceptions_catchedException() throws Exception { when(call.call()).thenThrow(IOException.class); Retriever.retryOnError3Times(ERROR_MESSAGE, call); }
public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
@Test public void repeatOnError3Times_callAlwaysThrowsExceptions_callInvoked3Times() throws Exception { when(call.call()).thenThrow(IOException.class); try { Retriever.<String,IOException>retryOnError3Times(ERROR_MESSAGE, call); } catch (IOException e) { e.printStackTrace(); } verify(call,times(4)).call(); }
public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
Retriever { public static void retryOnError3Times(String errorMessage, Runnable runnable) { retryOnError3Times(errorMessage,()->{ runnable.run(); return null; }); } static void retryOnError3Times(String errorMessage, Runnable runnable); static V retryOnError3Times(String errorMessage, Callable<V> callable); static void waitForSpecificTime(int milliSecond); }
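Only the Runnable overload of retryOnError3Times is shown in these rows, while the tests call the generic Callable overload (Retriever.<String,IOException>retryOnError3Times). A hypothetical reconstruction of that overload, with the attempt count (one initial call plus three retries, matching verify(call, times(4))) and the rethrow of the last failure inferred from the tests rather than taken from the source:

    import java.util.concurrent.Callable;

    public class RetrieverSketch {
        // Reconstruction based on observed test behaviour; the real Retriever may differ in
        // logging, delays (waitForSpecificTime) and exception-handling details.
        @SuppressWarnings("unchecked")
        public static <V, E extends Exception> V retryOnError3Times(String errorMessage, Callable<V> callable) throws E {
            int attemptsLeft = 4;                                    // initial attempt + 3 retries
            while (true) {
                try {
                    return callable.call();
                } catch (Exception e) {
                    if (--attemptsLeft > 0) {
                        System.err.println(errorMessage + " - retrying, attempts left: " + attemptsLeft);
                    } else {
                        throw (E) e;                                 // after the last attempt the failure propagates
                    }
                }
            }
        }
    }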
@Test public void synchronizeShouldNotFailIfThereIsNoTask() { synchronizer.synchronizeTasksByTaskStateFromBasicInfo(TOPOLOGY_NAME, TOPICS); }
public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
@Test public void synchronizedShouldRepairInconsistentData() { when(tasksByStateDAO.listAllActiveTasksInTopology(eq(TOPOLOGY_NAME))).thenReturn(Collections.singletonList(TASK_TOPIC_INFO_1)); when(taskInfoDAO.findByIds(eq(Collections.singleton(1L)))).thenReturn(Collections.singletonList(INFO_1_OF_UNSYNCED)); synchronizer.synchronizeTasksByTaskStateFromBasicInfo(TOPOLOGY_NAME, TOPICS); verify(tasksByStateDAO).updateTask(eq(TOPOLOGY_NAME), eq(1L), eq(TaskState.QUEUED.toString()), eq(TaskState.PROCESSED.toString())); }
public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
@Test public void synchronizedShouldNotTouchTasksWithConsistentData() { when(tasksByStateDAO.listAllActiveTasksInTopology(eq(TOPOLOGY_NAME))).thenReturn(Collections.singletonList(TASK_TOPIC_INFO_1)); when(taskInfoDAO.findByIds(eq(Collections.singleton(1L)))).thenReturn(Collections.singletonList(INFO_1)); synchronizer.synchronizeTasksByTaskStateFromBasicInfo(TOPOLOGY_NAME, TOPICS); verify(tasksByStateDAO, never()).updateTask(any(), anyLong(), any(), any()); }
public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
@Test public void synchronizedShouldOnlyConcernTasksWithTopicReservedForTopology() { when(tasksByStateDAO.listAllActiveTasksInTopology(eq(TOPOLOGY_NAME))).thenReturn(Collections.singletonList(TASK_TOPIC_INFO_1_UNKNOWN_TOPIC)); synchronizer.synchronizeTasksByTaskStateFromBasicInfo(TOPOLOGY_NAME, TOPICS); verify(taskInfoDAO, never()).findByIds(eq(Collections.singleton(1L))); verify(tasksByStateDAO, never()).updateTask(any(), anyLong(), any(), any()); }
public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
TaskStatusSynchronizer { public void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics) { List<TaskInfo> tasksFromTaskByTaskStateTableList = tasksByStateDAO.listAllActiveTasksInTopology(topologyName); Map<Long, TaskInfo> tasksFromTaskByTaskStateTableMap = tasksFromTaskByTaskStateTableList.stream().filter(info -> availableTopics.contains(info.getTopicName())) .collect(Collectors.toMap(TaskInfo::getId, Function.identity())); List<TaskInfo> tasksFromBasicInfoTable = taskInfoDAO.findByIds(tasksFromTaskByTaskStateTableMap.keySet()); List<TaskInfo> tasksToCorrect = tasksFromBasicInfoTable.stream().filter(this::isFinished).collect(Collectors.toList()); for (TaskInfo task : tasksToCorrect) { tasksByStateDAO.updateTask(topologyName, task.getId(), tasksFromTaskByTaskStateTableMap.get(task.getId()).getState().toString(), task.getState().toString()); } } TaskStatusSynchronizer(CassandraTaskInfoDAO taskInfoDAO, TasksByStateDAO tasksByStateDAO); void synchronizeTasksByTaskStateFromBasicInfo(String topologyName, Collection<String> availableTopics); }
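The stream filter in synchronizeTasksByTaskStateFromBasicInfo uses an isFinished predicate whose body is not part of these rows. Judging from the tests (a task already PROCESSED in the basic-info table but still QUEUED in the per-state table gets corrected), a plausible sketch is a check for terminal states; the exact set of states is an assumption:

    // Hypothetical predicate; only PROCESSED is confirmed by the tests above, DROPPED is an assumption.
    private boolean isFinished(TaskInfo task) {
        return task.getState() == TaskState.PROCESSED || task.getState() == TaskState.DROPPED;
    }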
@Test public void testUpdateBasicInfoStateWithStartDateAndInfo() throws Exception { long taskId = 1; int containsElements = 1; int expectedSize = 1; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; Date startTime = new Date(); TaskInfo expectedTaskInfo = createTaskInfo(taskId, containsElements, topologyName, taskState, taskInfo, null, startTime, null); taskInfoDAO.insert(taskId, topologyName, expectedSize, containsElements, taskState.toString(), taskInfo, null, startTime, null, 0, null); final Tuple tuple = createTestTuple(NotificationTuple.prepareUpdateTask(taskId, taskInfo, taskState, startTime)); testedBolt.execute(tuple); TaskInfo result = taskInfoDAO.findById(taskId).get(); assertThat(result, notNullValue()); assertThat(result, is(expectedTaskInfo)); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
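The execute() method above uses a get-or-create sequence for the per-task NotificationCache (null check followed by put). The same pattern can be written with Map.computeIfAbsent; the snippet below is a generic illustration with a stand-in cache class, not the project's code:

    import java.util.HashMap;
    import java.util.Map;

    public class PerTaskCacheDemo {
        static class NotificationCache {                 // stand-in for the bolt's inner cache class
            final long taskId;
            int processed;
            NotificationCache(long taskId) { this.taskId = taskId; }
        }

        private final Map<Long, NotificationCache> cache = new HashMap<>();

        NotificationCache cacheFor(long taskId) {
            // one call instead of get() + null check + put()
            return cache.computeIfAbsent(taskId, NotificationCache::new);
        }

        public static void main(String[] args) {
            PerTaskCacheDemo demo = new PerTaskCacheDemo();
            demo.cacheFor(1L).processed++;
            demo.cacheFor(1L).processed++;
            System.out.println(demo.cacheFor(1L).processed);   // 2 - the same entry is reused
        }
    }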
@Test public void testUpdateBasicInfoStateWithFinishDateAndInfo() throws Exception { long taskId = 1; int containsElements = 1; int expectedSize = 1; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; Date finishDate = new Date(); TaskInfo expectedTaskInfo = createTaskInfo(taskId, containsElements, topologyName, taskState, taskInfo, null, null, finishDate); taskInfoDAO.insert(taskId, topologyName, expectedSize, containsElements, taskState.toString(), taskInfo, null, null, finishDate, 0, null); final Tuple tuple = createTestTuple(NotificationTuple.prepareEndTask(taskId, taskInfo, taskState, finishDate)); testedBolt.execute(tuple); TaskInfo result = taskInfoDAO.findById(taskId).get(); assertThat(result, notNullValue()); assertThat(result, is(expectedTaskInfo)); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
@Test public void shouldSuccessfullyCompressFolder() throws Exception { folderPath = FileUtil.createFolder(); File folder = new File(folderPath); assertTrue(folder.isDirectory()); InputStream inputStream = IOUtils.toInputStream("some test data for my input stream"); createFile(inputStream, folderPath + "fileName"); zipFolderPath = FileUtil.createZipFolderPath(new Date()); FolderCompressor.compress(folderPath, zipFolderPath); assertNotNull(zipFolderPath); }
public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); }
FolderCompressor { public static void compress(String folderPath, String zipFolderPath) throws ZipException { File folder = new File(folderPath); ZipUtil.pack(folder, new File(zipFolderPath)); } static void compress(String folderPath, String zipFolderPath); }
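A standalone usage sketch of the focal compress method for the successful case; the temporary paths and file content below are illustrative, unlike the project-specific FileUtil helpers used by the test, which are not shown here. It assumes FolderCompressor is on the classpath:

    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class FolderCompressorDemo {
        public static void main(String[] args) throws Exception {
            Path folder = Files.createTempDirectory("to-compress");
            Files.write(folder.resolve("content.txt"), "some test data".getBytes());   // make the folder non-empty
            File zip = File.createTempFile("compressed", ".zip");
            FolderCompressor.compress(folder.toString(), zip.getAbsolutePath());
            System.out.println("Created archive: " + zip.getAbsolutePath() + " (" + zip.length() + " bytes)");
        }
    }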
@Test public void verifyOnlyOneNotificationForRepeatedRecord() throws Exception { long taskId = 1; taskInfoDAO.insert(taskId, null, 10, 0, TaskState.CURRENTLY_PROCESSING.toString(), "", null, null, null, 0, null); Tuple tuple = createNotificationTuple(taskId, RecordState.SUCCESS); testedBolt.execute(tuple); testedBolt.execute(tuple); TaskInfo taskProgress = cassandraReportService.getTaskProgress(String.valueOf(taskId)); List<SubTaskInfo> notifications = cassandraReportService.getDetailedTaskReportBetweenChunks("" + taskId, 0, 100); assertThat(notifications, hasSize(1)); assertEquals(taskProgress.getProcessedElementCount(), 1); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
@Test public void testSuccessfulNotificationFor101Tuples() throws Exception { long taskId = 1; int expectedSize = 101; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, expectedSize, 0, taskState.toString(), taskInfo, null, null, null, 0, null); final Tuple setUpTuple = createTestTuple(NotificationTuple.prepareUpdateTask(taskId, taskInfo, taskState, null)); testedBolt.execute(setUpTuple); TaskInfo beforeExecute = cassandraReportService.getTaskProgress(String.valueOf(taskId)); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); for (int i = 0; i < 98; i++) { testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); } Thread.sleep(5001); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); TaskInfo afterOneHundredExecutions = cassandraReportService.getTaskProgress(String.valueOf(taskId)); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); assertEquals(beforeExecute.getProcessedElementCount(), 0); assertThat(beforeExecute.getState(), is(TaskState.CURRENTLY_PROCESSING)); assertEquals(afterOneHundredExecutions.getProcessedElementCount(), 100); assertThat(afterOneHundredExecutions.getState(), is(TaskState.CURRENTLY_PROCESSING)); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
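The 101-tuple test above only sees the progress counter move after 100 records, and the Thread.sleep(5001) before the 100th record suggests a time-based flush as well. A condensed illustration of such a count-or-time flush condition; the constants 100 and 5 seconds are inferred from the test, not read from the bolt's source:

    // Inferred flush rule: write progress to Cassandra every 100 records, or when more
    // than 5 seconds have passed since the last write (both values are assumptions).
    static boolean shouldFlushProgress(int bufferedRecords, long lastFlushMillis) {
        return bufferedRecords >= 100 || System.currentTimeMillis() - lastFlushMillis > 5_000;
    }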
@Test public void testSuccessfulProgressUpdateAfterBoltRecreate() throws Exception { long taskId = 1; int expectedSize =4; String topologyName = ""; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, expectedSize, 0, taskState.toString(), taskInfo, null, null, null, 0, null); final Tuple setUpTuple = createTestTuple(NotificationTuple.prepareUpdateTask(taskId, taskInfo, taskState, null)); testedBolt.execute(setUpTuple); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); createBolt(); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); Thread.sleep(5001); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); TaskInfo info = cassandraReportService.getTaskProgress(String.valueOf(taskId)); assertEquals(3, info.getProcessedElementCount()); assertEquals(TaskState.CURRENTLY_PROCESSING, info.getState()); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); info = cassandraReportService.getTaskProgress(String.valueOf(taskId)); assertEquals(expectedSize, info.getProcessedElementCount()); assertEquals(TaskState.PROCESSED, info.getState()); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
@Test public void testValidNotificationAfterBoltRecreate() throws Exception { long taskId = 1; int expectedSize = 2; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, 2, 0, taskState.toString(), taskInfo, null, null, null, 0, null); final Tuple setUpTuple = createTestTuple(NotificationTuple.prepareUpdateTask(taskId, taskInfo, taskState, null)); testedBolt.execute(setUpTuple); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); createBolt(); testedBolt.execute(createNotificationTuple(taskId, RecordState.SUCCESS)); assertEquals(expectedSize,subtaskDAO.getProcessedFilesCount(taskId)); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
@Test public void testValidErrorReportDataAfterBoltRecreate() throws Exception { long taskId = 1; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, 2, 0, taskState.toString(), taskInfo, null, null, null, 0, null); final Tuple setUpTuple = createTestTuple(NotificationTuple.prepareUpdateTask(taskId, taskInfo, taskState, null)); testedBolt.execute(setUpTuple); testedBolt.execute(createNotificationTuple(taskId, RecordState.ERROR, RESOURCE_1)); createBolt(); testedBolt.execute(createNotificationTuple(taskId, RecordState.ERROR, RESOURCE_2)); assertEquals(2, subtaskDAO.getProcessedFilesCount(taskId)); TaskErrorsInfo errorReport = cassandraReportService.getGeneralTaskErrorReport("" + taskId, 100); assertEquals(1, errorReport.getErrors().size()); assertEquals(2, errorReport.getErrors().get(0).getOccurrences()); TaskErrorsInfo specificReport = cassandraReportService.getSpecificTaskErrorReport("" + taskId, errorReport.getErrors().get(0).getErrorType(), 100); assertEquals(1, specificReport.getErrors().size()); TaskErrorInfo specificReportErrorInfo = specificReport.getErrors().get(0); assertEquals("text", specificReportErrorInfo.getMessage()); assertEquals(2, specificReportErrorInfo.getErrorDetails().size()); assertEquals(RESOURCE_1, specificReportErrorInfo.getErrorDetails().get(0).getIdentifier()); assertEquals(RESOURCE_2, specificReportErrorInfo.getErrorDetails().get(1).getIdentifier()); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
@Test public void testNotificationProgressPercentage() throws Exception { CassandraReportService cassandraReportService = new CassandraReportService(HOST, PORT, KEYSPACE, "", ""); long taskId = 1; int expectedSize = 330; int errors = 5; int middle = (int) (Math.random() * expectedSize); String topologyName = ""; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, expectedSize, 0, taskState.toString(), taskInfo, null, null, null, 0, null); final Tuple setUpTuple = createTestTuple(NotificationTuple.prepareUpdateTask(taskId, taskInfo, taskState, null)); testedBolt.execute(setUpTuple); List<Tuple> tuples = prepareTuples(taskId, expectedSize, errors); TaskInfo beforeExecute = cassandraReportService.getTaskProgress(String.valueOf(taskId)); TaskInfo middleExecute = null; for (int i = 0; i < tuples.size(); i++) { if(i == middle - 1){ Thread.sleep(5001); testedBolt.execute(tuples.get(i)); middleExecute = cassandraReportService.getTaskProgress(String.valueOf(taskId)); }else{ testedBolt.execute(tuples.get(i)); } } TaskInfo afterExecute = cassandraReportService.getTaskProgress(String.valueOf(taskId)); assertEquals(beforeExecute.getProcessedElementCount(), 0); assertThat(beforeExecute.getState(), is(TaskState.CURRENTLY_PROCESSING)); assertEquals(beforeExecute.getProcessedPercentage(), 0); if (middleExecute != null) { assertEquals(middleExecute.getProcessedElementCount(), (middle)); assertThat(middleExecute.getState(), is(TaskState.CURRENTLY_PROCESSING)); assertEquals(middleExecute.getProcessedPercentage(), 100 * middle / expectedSize); } int totalProcessed = expectedSize; assertEquals(afterExecute.getProcessedElementCount(), totalProcessed+(expectedSize - totalProcessed) ); assertThat(afterExecute.getState(), is(TaskState.PROCESSED)); assertEquals(afterExecute.getProcessedPercentage(), 100 * ((afterExecute.getProcessedElementCount() / (totalProcessed+(expectedSize - totalProcessed))))); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
@Test public void testNotificationForErrors() throws Exception { CassandraReportService cassandraReportService = new CassandraReportService(HOST, PORT, KEYSPACE, "", ""); long taskId = 1; int expectedSize = 20; int errors = 9; String topologyName = null; TaskState taskState = TaskState.CURRENTLY_PROCESSING; String taskInfo = ""; taskInfoDAO.insert(taskId, topologyName, expectedSize, 0, taskState.toString(), taskInfo, null, null, null, 0, null); final Tuple setUpTuple = createTestTuple(NotificationTuple.prepareUpdateTask(taskId, taskInfo, taskState, null)); testedBolt.execute(setUpTuple); List<Tuple> tuples = prepareTuples(taskId, expectedSize, errors); TaskInfo beforeExecute = cassandraReportService.getTaskProgress(String.valueOf(taskId)); for (Tuple tuple : tuples) { testedBolt.execute(tuple); } TaskErrorsInfo errorsInfo = cassandraReportService.getGeneralTaskErrorReport(String.valueOf(taskId), 0); assertEquals(beforeExecute.getProcessedElementCount(), 0); assertThat(beforeExecute.getState(), is(TaskState.CURRENTLY_PROCESSING)); assertEquals(beforeExecute.getErrors(), 0); assertEquals(errorsInfo.getErrors().size(), 1); assertEquals(errorsInfo.getErrors().get(0).getOccurrences(), errors); }
@Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
NotificationBolt extends BaseRichBolt { @Override public void execute(Tuple tuple) { try { NotificationTuple notificationTuple = NotificationTuple .fromStormTuple(tuple); NotificationCache nCache = cache.get(notificationTuple.getTaskId()); if (nCache == null) { nCache = new NotificationCache(notificationTuple.getTaskId()); cache.put(notificationTuple.getTaskId(), nCache); } storeTaskDetails(notificationTuple, nCache); } catch (NoHostAvailableException | QueryExecutionException ex) { LOGGER.error("Cannot store notification to Cassandra because: {}", ex.getMessage()); } catch (Exception ex) { LOGGER.error("Problem with store notification because: {}", ex.getMessage(), ex); } finally { outputCollector.ack(tuple); } } NotificationBolt(String hosts, int port, String keyspaceName, String userName, String password); @Override void execute(Tuple tuple); @Override void prepare(Map stormConf, TopologyContext tc, OutputCollector outputCollector); @Override void declareOutputFields(OutputFieldsDeclarer ofd); void clearCache(); }
@Test public void executeMcsBasedTask_taskIsNotKilled_verifyUpdateTaskInfoInCassandra() { task.addDataEntry(InputDataType.FILE_URLS, Collections.singletonList(FILE_URL_1)); submiter.execute(submitParameters); verify(taskStatusUpdater).updateStatusExpectedSize(eq(TASK_ID), eq(String.valueOf(TaskState.QUEUED)), eq(1)); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void executeMcsBasedTask_oneFileUrl() { task.addDataEntry(InputDataType.FILE_URLS, Collections.singletonList(FILE_URL_1)); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void executeMcsBasedTask_threeFileUrls() { task.addDataEntry(InputDataType.FILE_URLS, Arrays.asList(FILE_URL_1, FILE_URL_2, FILE_URL_3)); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1, FILE_URL_2, FILE_URL_3); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void shouldSuccessfullyDownloadTwoRecords() throws Exception { Representation representation = prepareRepresentation(); inputStream = IOUtils.toInputStream("some test data for my input stream"); inputStream2 = IOUtils.toInputStream("some test data for my input stream"); when(dataSetServiceClient.getRepresentationIterator(anyString(), anyString())).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(true, false); when(representationIterator.next()).thenReturn(representation); when(fileServiceClient.getFileUri(CLOUD_ID, REPRESENTATION_NAME, VERSION, FILE)).thenReturn(new URI(FILE_URL)); when(fileServiceClient.getFileUri(CLOUD_ID, REPRESENTATION_NAME, VERSION, FILE + "2")).thenReturn(new URI(FILE_URL2)); when(fileServiceClient.getFile(FILE_URL)).thenReturn(inputStream); when(fileServiceClient.getFile(FILE_URL2)).thenReturn(inputStream2); String folderPath = recordDownloader.downloadFilesFromDataSet(DATA_PROVIDER, DATASET_NAME, REPRESENTATION_NAME, 1); assertNotNull(folderPath); java.io.File folder = new java.io.File(folderPath); assert (folder.isDirectory()); assertEquals(folder.list().length, 2); FileUtils.forceDelete(new java.io.File(folderPath)); }
public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } RecordDownloader(DataSetServiceClient dataSetServiceClient, FileServiceClient fileServiceClient); }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } RecordDownloader(DataSetServiceClient dataSetServiceClient, FileServiceClient fileServiceClient); final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount); }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException,MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } RecordDownloader(DataSetServiceClient dataSetServiceClient, FileServiceClient fileServiceClient); final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount); }
@Test public void executeMcsBasedTask_3000FileUrls() { List<String> fileUrls = new ArrayList<>(); for (int i = 0; i < 3000; i++) { fileUrls.add(FILE_URL_1); } task.addDataEntry(InputDataType.FILE_URLS, fileUrls); submiter.execute(submitParameters); verifyValidTaskSent(fileUrls.toArray(new String[0])); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test
public void executeMcsBasedTask_oneDatasetWithOneFile() {
    task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1));
    when(dataSetServiceClient.getRepresentationIterator(eq(DATASET_PROVIDER_1), eq(DATASET_ID_1))).thenReturn(representationIterator);
    when(representationIterator.hasNext()).thenReturn(true, false);
    when(representationIterator.next()).thenReturn(REPRESENTATION_1);

    submiter.execute(submitParameters);

    verifyValidTaskSent(FILE_URL_1);
}
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
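Each target test for MCSTaskSubmiter ends with a verifyValidTaskSent(...) call, but that helper is not included in any row. The sketch below shows one hedged way such a check could be written, assuming the submitter hands each record to the injected RecordExecutionSubmitService; the method name submitRecord(DpsRecord, String), the DpsRecord.getRecordId() accessor, and the Hamcrest/Mockito static imports are assumptions, not content from the dataset.

// Hypothetical helper -- the real verifyValidTaskSent is not part of the dataset rows.
// Assumption: one record is submitted per expected file URL; submitRecord and getRecordId
// are placeholder names for whatever the real service and record types expose.
private void verifyValidTaskSent(String... expectedFileUrls) {
    ArgumentCaptor<DpsRecord> captor = ArgumentCaptor.forClass(DpsRecord.class);
    verify(recordSubmitService, times(expectedFileUrls.length))
            .submitRecord(captor.capture(), anyString());

    List<String> sentUrls = captor.getAllValues().stream()
            .map(DpsRecord::getRecordId)
            .collect(Collectors.toList());
    assertThat(sentUrls, containsInAnyOrder(expectedFileUrls));
}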
@Test
public void executeMcsBasedTask_oneDatasetWithThreeFiles() {
    task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1));
    when(dataSetServiceClient.getRepresentationIterator(eq(DATASET_PROVIDER_1), eq(DATASET_ID_1))).thenReturn(representationIterator);
    when(representationIterator.hasNext()).thenReturn(true, true, true, false);
    when(representationIterator.next()).thenReturn(REPRESENTATION_1);

    submiter.execute(submitParameters);

    verifyValidTaskSent(FILE_URL_1, FILE_URL_1, FILE_URL_1);
}
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test
public void executeMcsBasedTask_oneLastRevisionWithOneFile() throws MCSException {
    task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1));
    task.addParameter(PluginParameterKeys.REVISION_NAME, REVISION_NAME);
    task.addParameter(PluginParameterKeys.REVISION_PROVIDER, REVISION_PROVIDER_1);
    task.addParameter(PluginParameterKeys.REPRESENTATION_NAME, REPRESENTATION_NAME);
    when(dataSetServiceClient.getLatestDataSetCloudIdByRepresentationAndRevisionChunk(eq(DATASET_ID_1), eq(DATASET_PROVIDER_1), eq(REVISION_PROVIDER_1), eq(REVISION_NAME), eq(REPRESENTATION_NAME), eq(false), eq(null))).thenReturn(latestDataChunk);
    when(latestDataChunk.getResults()).thenReturn(latestDataList);
    latestDataList.add(new CloudIdAndTimestampResponse(CLOUD_ID1, FILE_CREATION_DATE_1));
    when(recordServiceClient.getRepresentationsByRevision(eq(CLOUD_ID1), eq(REPRESENTATION_NAME), eq(REVISION_NAME), eq(REVISION_PROVIDER_1), anyString())).thenReturn(Collections.singletonList(REPRESENTATION_1));

    submiter.execute(submitParameters);

    verifyValidTaskSent(FILE_URL_1);
}
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void executeMcsBasedTask_lastRevisionsForTwoObject_verifyTwoRecordsSentToKafka() throws MCSException { prepareInvocationForLastRevisionOfTwoObjects(); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1,FILE_URL_1); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void executeMcsBasedTask_lastRevisionsForTwoObjectAndLimitTo1_verifyOnlyOneRecordSentToKafka() throws MCSException { prepareInvocationForLastRevisionOfTwoObjects(); task.addParameter(PluginParameterKeys.SAMPLE_SIZE,"1"); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void executeMcsBasedTask_lastRevisionsForThreeObjectsInThreeChunks_verifyThreeRecordsSentToKafka() throws MCSException { prepareInvocationForLastRevisionForThreeObjectsInThreeChunks(); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1,FILE_URL_1,FILE_URL_1); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void executeMcsBasedTask_lastRevisionsForThreeObjectsInThreeChunks_verifyOnlyTwoRecordSentToKafka() throws MCSException { prepareInvocationForLastRevisionForThreeObjectsInThreeChunks(); task.addParameter(PluginParameterKeys.SAMPLE_SIZE,"2"); submiter.execute(submitParameters); verifyValidTaskSent(FILE_URL_1,FILE_URL_1); }
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test
public void executeMcsBasedTask_oneRevisionForGivenTimestampWithOneFile() throws MCSException {
    task.addDataEntry(InputDataType.DATASET_URLS, Collections.singletonList(DATASET_URL_1));
    task.addParameter(PluginParameterKeys.REVISION_NAME, REVISION_NAME);
    task.addParameter(PluginParameterKeys.REVISION_PROVIDER, REVISION_PROVIDER_1);
    task.addParameter(PluginParameterKeys.REVISION_TIMESTAMP, FILE_CREATION_DATE_STRING_1);
    task.addParameter(PluginParameterKeys.REPRESENTATION_NAME, REPRESENTATION_NAME);
    when(dataSetServiceClient.getDataSetRevisionsChunk(eq(DATASET_PROVIDER_1), eq(DATASET_ID_1), eq(REPRESENTATION_NAME), eq(REVISION_NAME), eq(REVISION_PROVIDER_1), eq(FILE_CREATION_DATE_STRING_1), eq(null), eq(null))).thenReturn(dataChunk);
    when(dataChunk.getResults()).thenReturn(dataList);
    dataList.add(new CloudTagsResponse(CLOUD_ID1, false, false, false));
    when(recordServiceClient.getRepresentationsByRevision(eq(CLOUD_ID1), eq(REPRESENTATION_NAME), eq(REVISION_NAME), eq(REVISION_PROVIDER_1), anyString())).thenReturn(Collections.singletonList(REPRESENTATION_1));

    submiter.execute(submitParameters);

    verifyValidTaskSent(FILE_URL_1);
}
public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
MCSTaskSubmiter { public void execute(SubmitTaskParameters submitParameters) { DpsTask task = submitParameters.getTask(); try { LOGGER.info("Sending task id={} to topology {} by kafka topic {}. Parameters:\n{}", task.getTaskId(), submitParameters.getTopologyName(), submitParameters.getTopicName(), submitParameters); checkIfTaskIsKilled(task); logProgress(submitParameters, 0); int expectedSize; if (taskContainsFileUrls(task)) { expectedSize = executeForFilesList(submitParameters); } else { expectedSize = executeForDatasetList(submitParameters); } checkIfTaskIsKilled(task); if (expectedSize != 0) { taskStatusUpdater.updateStatusExpectedSize(task.getTaskId(), TaskState.QUEUED.toString(), expectedSize); LOGGER.info("Submitting {} records of task id={} to Kafka succeeded.", expectedSize, task.getTaskId()); } else { taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because it is empty"); LOGGER.warn("The task id={} was dropped because it is empty.", task.getTaskId()); } } catch (SubmitingTaskWasKilled e) { LOGGER.warn(e.getMessage(), e); } catch (Exception e) { LOGGER.error("MCSTaskSubmiter error for taskId={}", task.getTaskId(), e); taskStatusUpdater.setTaskDropped(task.getTaskId(), "The task was dropped because " + e.getMessage()); } } MCSTaskSubmiter(TaskStatusChecker taskStatusChecker, TaskStatusUpdater taskStatusUpdater, RecordExecutionSubmitService recordSubmitService, ProcessedRecordsDAO processedRecordsDAO, String mcsClientURL); void execute(SubmitTaskParameters submitParameters); }
@Test public void shouldGetProgressReport() throws Exception { TaskInfo taskInfo = new TaskInfo(TASK_ID, TOPOLOGY_NAME, TaskState.PROCESSED, EMPTY_STRING, 100, 100, 10, 50, new Date(), new Date(), new Date()); when(reportService.getTaskProgress(eq(Long.toString(TASK_ID)))).thenReturn(taskInfo); when(topologyManager.containsTopology(TOPOLOGY_NAME)).thenReturn(true); ResultActions response = mockMvc.perform(get(PROGRESS_REPORT_WEB_TARGET, TOPOLOGY_NAME, TASK_ID)); TaskInfo resultedTaskInfo = new ObjectMapper().readValue(response.andReturn().getResponse().getContentAsString(), TaskInfo.class); assertThat(taskInfo, is(resultedTaskInfo)); }
@GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId); @PostMapping(consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> submitTask( final HttpServletRequest request, @RequestBody final DpsTask task, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/restart", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> restartTask( final HttpServletRequest request, @PathVariable final long taskId, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/cleaner", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> cleanIndexingDataSet( @PathVariable final String topologyName, @PathVariable final String taskId, @RequestBody final DataSetCleanerParameters cleanerParameters ); @PostMapping(path = "{taskId}/permit") @PreAuthorize("hasRole('ROLE_ADMIN')") @ReturnType("java.lang.Void") ResponseEntity<Void> grantPermissions( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam String username); @PostMapping(path = "{taskId}/kill") @PreAuthorize("hasRole('ROLE_ADMIN') OR hasPermission(#taskId,'" + TASK_PREFIX + "', write)") ResponseEntity<String> killTask( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam(defaultValue = "Dropped by the user") String info); }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId); @PostMapping(consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> submitTask( final HttpServletRequest request, @RequestBody final DpsTask task, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/restart", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> restartTask( final HttpServletRequest request, @PathVariable final long taskId, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/cleaner", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> cleanIndexingDataSet( @PathVariable final String topologyName, @PathVariable final String taskId, @RequestBody final DataSetCleanerParameters cleanerParameters ); @PostMapping(path = "{taskId}/permit") @PreAuthorize("hasRole('ROLE_ADMIN')") @ReturnType("java.lang.Void") ResponseEntity<Void> grantPermissions( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam String username); @PostMapping(path = "{taskId}/kill") @PreAuthorize("hasRole('ROLE_ADMIN') OR hasPermission(#taskId,'" + TASK_PREFIX + "', write)") ResponseEntity<String> killTask( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam(defaultValue = "Dropped by the user") String info); static final String TASK_PREFIX; }
@Test(expected = RepresentationNotFoundException.class) public void shouldThrowRepresentationNotFoundException() throws Exception { when(dataSetServiceClient.getRepresentationIterator(anyString(), anyString())).thenReturn(representationIterator); when(representationIterator.hasNext()).thenReturn(false, false); recordDownloader.downloadFilesFromDataSet(DATA_PROVIDER, DATASET_NAME, EMPTY_REPRESENTATION, 1); }
public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException, MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException, MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException, MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } RecordDownloader(DataSetServiceClient dataSetServiceClient, FileServiceClient fileServiceClient); }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException, MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } RecordDownloader(DataSetServiceClient dataSetServiceClient, FileServiceClient fileServiceClient); final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount); }
RecordDownloader { public final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount) throws InterruptedException, ExecutionException, IOException, DriverException, MimeTypeException, RepresentationNotFoundException { ExecutorService executorService = Executors.newFixedThreadPool(threadsCount); final String folderPath = FileUtil.createFolder(); boolean isSuccess = false; try { RepresentationIterator iterator = dataSetServiceClient.getRepresentationIterator(providerId, datasetName); boolean representationIsFound = false; while (iterator.hasNext()) { final Representation representation = iterator.next(); if (representation.getRepresentationName().equals(representationName)) { representationIsFound = true; downloadFilesInsideRepresentation(executorService, representation, folderPath); } } if (!representationIsFound) throw new RepresentationNotFoundException("The representation " + representationName + " was not found inside the dataset: " + datasetName); isSuccess = true; return folderPath; } finally { executorService.shutdown(); if (!isSuccess) FileUtils.deleteDirectory(new java.io.File(folderPath)); } } RecordDownloader(DataSetServiceClient dataSetServiceClient, FileServiceClient fileServiceClient); final String downloadFilesFromDataSet(String providerId, String datasetName, String representationName, int threadsCount); }
@Test public void shouldThrowExceptionIfTaskIdWasNotFound() throws Exception { when(reportService.getTaskProgress(eq(Long.toString(TASK_ID)))).thenThrow(AccessDeniedOrObjectDoesNotExistException.class); when(topologyManager.containsTopology(TOPOLOGY_NAME)).thenReturn(true); ResultActions response = mockMvc.perform( get(PROGRESS_REPORT_WEB_TARGET, TOPOLOGY_NAME, TASK_ID) ); response.andExpect(status().isMethodNotAllowed()); }
@GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId); @PostMapping(consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> submitTask( final HttpServletRequest request, @RequestBody final DpsTask task, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/restart", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> restartTask( final HttpServletRequest request, @PathVariable final long taskId, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/cleaner", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> cleanIndexingDataSet( @PathVariable final String topologyName, @PathVariable final String taskId, @RequestBody final DataSetCleanerParameters cleanerParameters ); @PostMapping(path = "{taskId}/permit") @PreAuthorize("hasRole('ROLE_ADMIN')") @ReturnType("java.lang.Void") ResponseEntity<Void> grantPermissions( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam String username); @PostMapping(path = "{taskId}/kill") @PreAuthorize("hasRole('ROLE_ADMIN') OR hasPermission(#taskId,'" + TASK_PREFIX + "', write)") ResponseEntity<String> killTask( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam(defaultValue = "Dropped by the user") String info); }
TopologyTasksResource { @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId) throws AccessDeniedOrObjectDoesNotExistException, AccessDeniedOrTopologyDoesNotExistException { taskSubmissionValidator.assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return reportService.getTaskProgress(taskId); } @GetMapping(value = "{taskId}/progress", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskInfo getTaskProgress( @PathVariable final String topologyName, @PathVariable final String taskId); @PostMapping(consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> submitTask( final HttpServletRequest request, @RequestBody final DpsTask task, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/restart", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> restartTask( final HttpServletRequest request, @PathVariable final long taskId, @PathVariable final String topologyName, @RequestHeader("Authorization") final String authorizationHeader ); @PostMapping(path = "{taskId}/cleaner", consumes = {MediaType.APPLICATION_JSON_VALUE}) @PreAuthorize("hasPermission(#topologyName,'" + TOPOLOGY_PREFIX + "', write)") ResponseEntity<Void> cleanIndexingDataSet( @PathVariable final String topologyName, @PathVariable final String taskId, @RequestBody final DataSetCleanerParameters cleanerParameters ); @PostMapping(path = "{taskId}/permit") @PreAuthorize("hasRole('ROLE_ADMIN')") @ReturnType("java.lang.Void") ResponseEntity<Void> grantPermissions( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam String username); @PostMapping(path = "{taskId}/kill") @PreAuthorize("hasRole('ROLE_ADMIN') OR hasPermission(#taskId,'" + TASK_PREFIX + "', write)") ResponseEntity<String> killTask( @PathVariable String topologyName, @PathVariable String taskId, @RequestParam(defaultValue = "Dropped by the user") String info); static final String TASK_PREFIX; }
@Test public void shouldGetStatisticReport() throws Exception { when(validationStatisticsService.getTaskStatisticsReport(TASK_ID)).thenReturn(new StatisticsReport(TASK_ID, null)); when(topologyManager.containsTopology(anyString())).thenReturn(true); ResultActions response = mockMvc.perform(get(VALIDATION_STATISTICS_REPORT_WEB_TARGET, TOPOLOGY_NAME, TASK_ID)); System.err.println(content().string("taskId")); response .andExpect(status().isOk()) .andExpect(jsonPath("taskId", org.hamcrest.Matchers.is((int)TASK_ID))); }
@GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } @GetMapping(path = "{taskId}/reports/details", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<SubTaskInfo> getTaskDetailedReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(defaultValue = "1") @Min(1) int from, @RequestParam(defaultValue = "100") @Min(1) int to); @GetMapping(path = "{taskId}/reports/errors", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskErrorsInfo getTaskErrorReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(required = false) String error, @RequestParam(defaultValue = "0") int idsCount); @RequestMapping(method = { RequestMethod.HEAD }, path = "{taskId}/reports/errors") @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") Boolean checkIfErrorReportExists( @PathVariable String taskId, @PathVariable final String topologyName); @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId); @GetMapping(path = "{taskId}/reports/element", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<NodeReport> getElementsValues( @PathVariable String topologyName, @PathVariable String taskId, @NotNull @RequestParam("path") String elementPath); }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } @GetMapping(path = "{taskId}/reports/details", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<SubTaskInfo> getTaskDetailedReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(defaultValue = "1") @Min(1) int from, @RequestParam(defaultValue = "100") @Min(1) int to); @GetMapping(path = "{taskId}/reports/errors", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskErrorsInfo getTaskErrorReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(required = false) String error, @RequestParam(defaultValue = "0") int idsCount); @RequestMapping(method = { RequestMethod.HEAD }, path = "{taskId}/reports/errors") @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") Boolean checkIfErrorReportExists( @PathVariable String taskId, @PathVariable final String topologyName); @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId); @GetMapping(path = "{taskId}/reports/element", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<NodeReport> getElementsValues( @PathVariable String topologyName, @PathVariable String taskId, @NotNull @RequestParam("path") String elementPath); static final String TASK_PREFIX; }
@Test public void shouldReturn405WhenStatisticsRequestedButTopologyNotFound() throws Exception { when(validationStatisticsService.getTaskStatisticsReport(TASK_ID)).thenReturn(new StatisticsReport(TASK_ID, null)); when(topologyManager.containsTopology(anyString())).thenReturn(false); ResultActions response = mockMvc.perform( get(VALIDATION_STATISTICS_REPORT_WEB_TARGET, TOPOLOGY_NAME, TASK_ID) ); response.andExpect(status().isMethodNotAllowed()); }
@GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } @GetMapping(path = "{taskId}/reports/details", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<SubTaskInfo> getTaskDetailedReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(defaultValue = "1") @Min(1) int from, @RequestParam(defaultValue = "100") @Min(1) int to); @GetMapping(path = "{taskId}/reports/errors", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskErrorsInfo getTaskErrorReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(required = false) String error, @RequestParam(defaultValue = "0") int idsCount); @RequestMapping(method = { RequestMethod.HEAD }, path = "{taskId}/reports/errors") @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") Boolean checkIfErrorReportExists( @PathVariable String taskId, @PathVariable final String topologyName); @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId); @GetMapping(path = "{taskId}/reports/element", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<NodeReport> getElementsValues( @PathVariable String topologyName, @PathVariable String taskId, @NotNull @RequestParam("path") String elementPath); }
ReportResource { @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") public StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId) throws AccessDeniedOrTopologyDoesNotExistException, AccessDeniedOrObjectDoesNotExistException { assertContainTopology(topologyName); reportService.checkIfTaskExists(taskId, topologyName); return validationStatisticsService.getTaskStatisticsReport(Long.parseLong(taskId)); } @GetMapping(path = "{taskId}/reports/details", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<SubTaskInfo> getTaskDetailedReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(defaultValue = "1") @Min(1) int from, @RequestParam(defaultValue = "100") @Min(1) int to); @GetMapping(path = "{taskId}/reports/errors", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") TaskErrorsInfo getTaskErrorReport( @PathVariable String taskId, @PathVariable final String topologyName, @RequestParam(required = false) String error, @RequestParam(defaultValue = "0") int idsCount); @RequestMapping(method = { RequestMethod.HEAD }, path = "{taskId}/reports/errors") @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") Boolean checkIfErrorReportExists( @PathVariable String taskId, @PathVariable final String topologyName); @GetMapping(path = "{taskId}/statistics", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") StatisticsReport getTaskStatisticsReport( @PathVariable String topologyName, @PathVariable String taskId); @GetMapping(path = "{taskId}/reports/element", produces = {MediaType.APPLICATION_JSON_VALUE, MediaType.APPLICATION_XML_VALUE}) @PreAuthorize("hasPermission(#taskId,'" + TASK_PREFIX + "', read)") List<NodeReport> getElementsValues( @PathVariable String topologyName, @PathVariable String taskId, @NotNull @RequestParam("path") String elementPath); static final String TASK_PREFIX; }
@Test public void shouldProvideSubmitterForDepublicationTopology() { TaskSubmitter taskSubmitter = new TaskSubmitterFactory( Mockito.mock(OaiTopologyTaskSubmitter.class), Mockito.mock(HttpTopologyTaskSubmitter.class), Mockito.mock(OtherTopologiesTaskSubmitter.class), Mockito.mock(DepublicationTaskSubmitter.class) ).provideTaskSubmitter( SubmitTaskParameters.builder().topologyName(TopologiesNames.DEPUBLICATION_TOPOLOGY).build()); assertTrue(taskSubmitter instanceof DepublicationTaskSubmitter); }
public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } TaskSubmitterFactory(OaiTopologyTaskSubmitter oaiTopologyTaskSubmitter, HttpTopologyTaskSubmitter httpTopologyTaskSubmitter, OtherTopologiesTaskSubmitter otherTopologiesTaskSubmitter, TaskSubmitter depublicationTaskSubmitter); }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } TaskSubmitterFactory(OaiTopologyTaskSubmitter oaiTopologyTaskSubmitter, HttpTopologyTaskSubmitter httpTopologyTaskSubmitter, OtherTopologiesTaskSubmitter otherTopologiesTaskSubmitter, TaskSubmitter depublicationTaskSubmitter); TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters); }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } TaskSubmitterFactory(OaiTopologyTaskSubmitter oaiTopologyTaskSubmitter, HttpTopologyTaskSubmitter httpTopologyTaskSubmitter, OtherTopologiesTaskSubmitter otherTopologiesTaskSubmitter, TaskSubmitter depublicationTaskSubmitter); TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters); }
@Test public void shouldProvideSubmitterForOaiTopology() { TaskSubmitter taskSubmitter = new TaskSubmitterFactory( Mockito.mock(OaiTopologyTaskSubmitter.class), Mockito.mock(HttpTopologyTaskSubmitter.class), Mockito.mock(OtherTopologiesTaskSubmitter.class), Mockito.mock(DepublicationTaskSubmitter.class) ).provideTaskSubmitter( SubmitTaskParameters.builder().topologyName(TopologiesNames.OAI_TOPOLOGY).build()); assertTrue(taskSubmitter instanceof OaiTopologyTaskSubmitter); }
public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } TaskSubmitterFactory(OaiTopologyTaskSubmitter oaiTopologyTaskSubmitter, HttpTopologyTaskSubmitter httpTopologyTaskSubmitter, OtherTopologiesTaskSubmitter otherTopologiesTaskSubmitter, TaskSubmitter depublicationTaskSubmitter); }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } TaskSubmitterFactory(OaiTopologyTaskSubmitter oaiTopologyTaskSubmitter, HttpTopologyTaskSubmitter httpTopologyTaskSubmitter, OtherTopologiesTaskSubmitter otherTopologiesTaskSubmitter, TaskSubmitter depublicationTaskSubmitter); TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters); }
TaskSubmitterFactory { public TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters) { switch (parameters.getTopologyName()) { case TopologiesNames.OAI_TOPOLOGY: return oaiTopologyTaskSubmitter; case TopologiesNames.HTTP_TOPOLOGY: return httpTopologyTaskSubmitter; case TopologiesNames.ENRICHMENT_TOPOLOGY: case TopologiesNames.INDEXING_TOPOLOGY: case TopologiesNames.LINKCHECK_TOPOLOGY: case TopologiesNames.MEDIA_TOPOLOGY: case TopologiesNames.NORMALIZATION_TOPOLOGY: case TopologiesNames.VALIDATION_TOPOLOGY: case TopologiesNames.XSLT_TOPOLOGY: return otherTopologiesTaskSubmitter; case TopologiesNames.DEPUBLICATION_TOPOLOGY: return depublicationTaskSubmitter; default: throw new IllegalArgumentException("Unable to find the TaskSubmitter for the given topology name: " + parameters.getTopologyName()); } } TaskSubmitterFactory(OaiTopologyTaskSubmitter oaiTopologyTaskSubmitter, HttpTopologyTaskSubmitter httpTopologyTaskSubmitter, OtherTopologiesTaskSubmitter otherTopologiesTaskSubmitter, TaskSubmitter depublicationTaskSubmitter); TaskSubmitter provideTaskSubmitter(SubmitTaskParameters parameters); }