I have to test a specific scenario by closing an ActiveMQ consumer's socket without killing the Java client process or the ActiveMQ server.
What is the best way to close a specific socket between a Java client and an ActiveMQ queue?
Maybe by calling org.apache.activemq.ActiveMQMessageConsumer.close() to close a single consumer, or org.apache.activemq.ActiveMQConnection.close() to close the connection along with all of its consumers' and producers' sessions.
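For illustration, a minimal client-side sketch of those two calls (the broker URL and queue name are placeholders):

import javax.jms.Connection;
import javax.jms.MessageConsumer;
import javax.jms.Session;

import org.apache.activemq.ActiveMQConnectionFactory;

public class ClientSideClose {
    public static void main(String[] args) throws Exception {
        Connection connection = new ActiveMQConnectionFactory("tcp://localhost:61616").createConnection();
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer = session.createConsumer(session.createQueue("TEST.QUEUE"));
        // closes only this consumer; the session and connection stay open
        consumer.close();
        // closes the socket along with all sessions, consumers and producers on it
        connection.close();
    }
}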
UPDATE
You can connect to the broker with VisualVM (or any JMX client) and invoke the connection MBean's stop() method.
Please consider that if your client is using failover, it will reconnect automatically.
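(For reference, a client is using failover when its broker URL wraps the transport like the placeholder below.)

// a failover client transparently reconnects after the broker drops its socket
ConnectionFactory cf = new ActiveMQConnectionFactory("failover:(tcp://localhost:61616)");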
Or do it in code like below,
to close a single consumer's connection by client ID or remote address:
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import javax.management.MBeanServerConnection;
import javax.management.MBeanServerInvocationHandler;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import org.apache.activemq.broker.jmx.ConnectionViewMBean;

public class JMXCloseConsumer {

    public static void main(String[] args) throws Exception {
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi");
        Map<String, String[]> env = new HashMap<>();
        String[] creds = { "admin", "admin" };
        env.put(JMXConnector.CREDENTIALS, creds);
        JMXConnector jmxc = JMXConnectorFactory.connect(url, env);
        MBeanServerConnection conn = jmxc.getMBeanServerConnection();
        ObjectName conName = new ObjectName(
                "org.apache.activemq:type=Broker,brokerName=localhost,connector=clientConnectors,connectorName=openwire,connectionViewType=clientId,connectionName=*");
        Set<ObjectName> connections = conn.queryNames(conName, null);
        for (ObjectName con : connections) {
            System.out.println(con.toString());
            ConnectionViewMBean connectionView = MBeanServerInvocationHandler.newProxyInstance(conn, con,
                    ConnectionViewMBean.class, true);
            // the remote address identifies the client socket on the broker side
            String address = connectionView.getRemoteAddress();
            System.out.println(address);
            // stop() drops the broker-side socket; the client process keeps running
            connectionView.stop();
        }
        jmxc.close();
    }
}
To close all consumers by connector name (note that stopping a transport connector drops every connection on it):
import java.util.HashMap;
import java.util.Map;

import javax.management.MBeanServerConnection;
import javax.management.MBeanServerInvocationHandler;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import org.apache.activemq.broker.jmx.BrokerViewMBean;
import org.apache.activemq.broker.jmx.ConnectorViewMBean;

public class JMXCloseAllConsumers {

    public static void main(String[] args) throws Exception {
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi");
        Map<String, String[]> env = new HashMap<>();
        String[] creds = { "admin", "admin" };
        env.put(JMXConnector.CREDENTIALS, creds);
        JMXConnector jmxc = JMXConnectorFactory.connect(url, env);
        MBeanServerConnection conn = jmxc.getMBeanServerConnection();
        ObjectName activeMq = new ObjectName("org.apache.activemq:type=Broker,brokerName=localhost");
        BrokerViewMBean mbean = MBeanServerInvocationHandler.newProxyInstance(conn, activeMq, BrokerViewMBean.class,
                true);
        // getTransportConnectors() maps connector names to their URIs
        Map<String, String> connectors = mbean.getTransportConnectors();
        for (Map.Entry<String, String> entry : connectors.entrySet()) {
            String name = entry.getKey();
            System.out.println(entry.getValue());
            ObjectName connector = new ObjectName(
                    "org.apache.activemq:type=Broker,brokerName=localhost,connector=clientConnectors,connectorName="
                            + name);
            ConnectorViewMBean connectorView = MBeanServerInvocationHandler.newProxyInstance(conn, connector,
                    ConnectorViewMBean.class, true);
            System.out.println(connectorView.connectionCount());
            if (connectorView.connectionCount() > 0) {
                // stopping the transport connector drops every connection on it
                connectorView.stop();
            }
        }
        jmxc.close();
    }
}
Or, stopping each connection individually before stopping the connector:
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import javax.management.MBeanServerConnection;
import javax.management.MBeanServerInvocationHandler;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import org.apache.activemq.broker.jmx.BrokerViewMBean;
import org.apache.activemq.broker.jmx.ConnectionViewMBean;
import org.apache.activemq.broker.jmx.ConnectorViewMBean;

public class JMXCloseAllConsumers {

    public static void main(String[] args) throws Exception {
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi");
        Map<String, String[]> env = new HashMap<>();
        String[] creds = { "admin", "admin" };
        env.put(JMXConnector.CREDENTIALS, creds);
        JMXConnector jmxc = JMXConnectorFactory.connect(url, env);
        MBeanServerConnection conn = jmxc.getMBeanServerConnection();
        ObjectName activeMq = new ObjectName("org.apache.activemq:type=Broker,brokerName=localhost");
        BrokerViewMBean mbean = MBeanServerInvocationHandler.newProxyInstance(conn, activeMq, BrokerViewMBean.class,
                true);
        Map<String, String> connectors = mbean.getTransportConnectors();
        for (String name : connectors.keySet()) {
            System.out.println(name);
            ObjectName connector = new ObjectName(
                    "org.apache.activemq:type=Broker,brokerName=localhost,connector=clientConnectors,connectorName="
                            + name);
            ConnectorViewMBean connectorView = MBeanServerInvocationHandler.newProxyInstance(conn, connector,
                    ConnectorViewMBean.class, true);
            System.out.println(connectorView.connectionCount());
            if (connectorView.connectionCount() > 0) {
                // first stop every connection on this connector, one by one
                ObjectName conName = new ObjectName(
                        "org.apache.activemq:type=Broker,brokerName=localhost,connector=clientConnectors,connectorName="
                                + name + ",connectionViewType=clientId,connectionName=*");
                Set<ObjectName> connections = conn.queryNames(conName, null);
                for (ObjectName con : connections) {
                    System.out.println(con.toString());
                    ConnectionViewMBean connectionView = MBeanServerInvocationHandler.newProxyInstance(conn, con,
                            ConnectionViewMBean.class, true);
                    String address = connectionView.getRemoteAddress();
                    System.out.println(address);
                    connectionView.stop();
                }
                // then stop the connector itself
                connectorView.stop();
            }
        }
        jmxc.close();
    }
}
Related
How do I set a timeout before the response in Kafka properties, in Java? I need to mock a webhook API pushing messages with a 60-second timeout before the response.
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class AsyncConsumer {

    public static void main(String[] args) {
        // properties to connect to Kafka
        Properties prop = new Properties();
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // a consumer needs deserializers, not serializers
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "1");
        // use the *_CONFIG constants; the *_DOC constants are documentation strings, not config keys
        prop.setProperty(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "60000");
        prop.setProperty(ConsumerConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, "60000");
        // session.timeout.ms must be larger than heartbeat.interval.ms
        prop.setProperty(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "70000");

        // Kafka consumer
        KafkaConsumer<String, Object> consumer = new KafkaConsumer<>(prop);
        // subscribe to the topic
        consumer.subscribe(Arrays.asList("TestTopic"));
        // read data from Kafka
        while (true) {
            ConsumerRecords<String, Object> records = consumer.poll(Duration.ofMillis(60000));
            for (ConsumerRecord<String, Object> record : records) {
                System.out.println("Key :" + record.key() + "\n" + "Value :" + record.value() + "\n"
                        + "partition :" + record.partition() + "\n" + "offset :" + record.offset() + "\n"
                        + "topic :" + record.topic() + "\n" + "timestamp :" + record.timestamp() + "\n");
            }
        }
    }
}
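If the goal is specifically to tolerate 60 seconds of processing between polls, the usual levers are the two consumer settings below; a hedged sketch of extra lines to add to the Properties above (the values are illustrative, not from the original post):

// maximum delay between poll() calls before the consumer is considered failed and rebalanced
prop.setProperty(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, "120000");
// how long client-side requests may wait for a broker response
prop.setProperty(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, "60000");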
package com.matchmove.Performance.kafka.tests;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import com.matchmove.tests.ASYNCpostWebhookTest;

public class AsyncProducer {

    public static void main(String[] args) {
        // properties to connect to Kafka
        Properties prop = new Properties();
        prop.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        prop.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        prop.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // upper bound on the time to report success or failure for a send();
        // must be >= request.timeout.ms + linger.ms
        prop.setProperty(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, "60000");

        // create the producer
        KafkaProducer<String, Object> producer = new KafkaProducer<>(prop);
        for (int i = 0; i <= 100; i++) {
            ProducerRecord<String, Object> record =
                    new ProducerRecord<String, Object>("TestTopic", ASYNCpostWebhookTest.PostAsynPerformance());
            producer.send(record);
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            System.out.println(record);
        }
        producer.close();
    }
}
package com.matchmove.tests;

import static io.restassured.RestAssured.given;

import java.util.HashMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.github.javafaker.Faker;
import com.matchmove.helpingfiles.*;
import com.matchmove.utilities.RandomStringGeneration;

import io.restassured.RestAssured;
import io.restassured.response.Response;
import io.restassured.specification.RequestSpecification;

public class ASYNCpostWebhookTest {

    protected static Logger logger = LoggerFactory.getLogger(ASYNCpostWebhookTest.class.getName());
    Faker fake = new Faker();

    static HashMap<String, Object> AsyncBody = new HashMap<String, Object>();
    static HashMap<String, Object> message = new HashMap<String, Object>();

    public static RequestSpecification httpRequest;
    public static Response response;
    public static String generatedString = RandomStringGeneration.randomstring();
    public static String generatedNumber = RandomStringGeneration.number();

    public static String PostAsynPerformance() {
        RestAssured.baseURI = "https://platform-svc-webhook.kops.matchmove-beta.com";
        httpRequest = RestAssured.given();
        AsyncBody.put("trace_id", "Amit Bhalla data " + generatedNumber);
        AsyncBody.put("service_name", "fast");
        AsyncBody.put("event_name", "OPENLOOP_DECLINED_PROGRAM_POS_LIMITS_BREACHED");
        AsyncBody.put("push_message_to_queue_count", 10);
        message.put("message", "Testing for Async by Amit Bhalla " + generatedNumber);
        AsyncBody.put("event_data", message);
        response = given().header("Authorization",
                "Basic ZW5YZTVFMnhSU3Z6RnFCWUZvOEF2ejV2UnJsd2hEeDM6ZnNPcHp1RXE0WGFUbm0wM013YmpSS2hSYzk3TktQR2FzbjRtc1hieTB"
                        + "zQ21GYUlpZ3BZTUF3UGVUNDZrUEE3SA==")
                .body(AsyncBody).when().post(resources.PostAsyncWebhook);
        return AsyncBody.toString();
    }
}
Hi, I have created a handler in Java for getting events from DynamoDB.
Here is my code:
package com.Lambda.dynamodb;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent.DynamodbStreamRecord;

public class DDBEventProcessor implements RequestHandler<DynamodbEvent, String> {

    public String handleRequest(DynamodbEvent ddbEvent, Context context) {
        for (DynamodbStreamRecord record : ddbEvent.getRecords()) {
            System.out.println(record.getEventID());
            System.out.println(record.getEventName());
            System.out.println(record.getDynamodb().toString());
        }
        return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
    }
}
The Lambda function is able to write the events to CloudWatch, but the challenge is that I have to index all the streamed records to an AWS Elasticsearch Service endpoint.
While searching through blogs I found a few code samples in Python and Node.js, but my requirement is to build this Lambda function in Java.
Could anyone please suggest how to achieve this in a Java Lambda function?
Hi, I have included the code below; it may be helpful to someone. It uses DynamoDB Streams to index the documents in Elasticsearch, both inside AWS and outside AWS.
package com.Firstlambda;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.http.HttpHost;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.json.JSONObject;

import com.amazonaws.auth.AWS4Signer;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.ItemUtils;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent.DynamodbStreamRecord;

public class HelloWorld implements RequestHandler<DynamodbEvent, String> {

    private static String serviceName = "es";
    private static String region = "us-east-1";
    private static String aesEndpoint = ""; // Amazon ES endpoint, used by the commented-out signed client below
    private static String index = "";
    private static String type = "_doc";

    static final AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();

    public String handleRequest(DynamodbEvent ddbEvent, Context context) {
        for (DynamodbStreamRecord record : ddbEvent.getRecords()) {
            System.out.println("EventName : " + record.getEventName());
            System.out.println("EventData : " + record.getDynamodb());

            // Elasticsearch outside AWS (e.g. Elastic Cloud)
            RestHighLevelClient esClient = esClient();
            // Amazon Elasticsearch Service inside AWS:
            // RestHighLevelClient esClient = esClient(serviceName, region);

            if (record.getEventName().equalsIgnoreCase("insert")) {
                String jsonString = getJsonstring(record.getDynamodb().getNewImage());
                String jsonUniqueId = getIdFromJsonString(jsonString);
                IndexRequest indexRequest = new IndexRequest(index, type, jsonUniqueId);
                indexRequest.source(jsonString, XContentType.JSON);
                try {
                    IndexResponse indexResponse = esClient.index(indexRequest, RequestOptions.DEFAULT);
                    System.out.println(indexResponse.toString());
                    // note: this returns after the first successfully indexed record
                    return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
                } catch (IOException e) {
                    System.out.println(e.getMessage());
                }
            } else if (record.getEventName().equalsIgnoreCase("modify")) {
                String jsonString = getJsonstring(record.getDynamodb().getNewImage());
                String jsonUniqueId = getIdFromJsonString(jsonString);
                UpdateRequest request = new UpdateRequest(index, type, jsonUniqueId);
                request.doc(jsonString, XContentType.JSON);
                try {
                    UpdateResponse updateResponse = esClient.update(request, RequestOptions.DEFAULT);
                    System.out.println(updateResponse.toString());
                    return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
                } catch (IOException e) {
                    System.out.println(e.getMessage());
                }
            } else {
                // REMOVE events carry only the keys, not a new image
                System.out.println("remove");
                System.out.println("KEYID : " + record.getDynamodb().getKeys().get("ID").getN());
                String deletedId = record.getDynamodb().getKeys().get("ID").getN();
                DeleteRequest request = new DeleteRequest(index, type, deletedId);
                try {
                    DeleteResponse deleteResponse = esClient.delete(request, RequestOptions.DEFAULT);
                    System.out.println(deleteResponse.toString());
                } catch (IOException e) {
                    System.out.println(e.getMessage());
                }
            }
        }
        return "Successfully processed";
    }

    public String getJsonstring(Map<String, AttributeValue> newImage) {
        // ItemUtils converts the low-level attribute map into a document-model Item
        String json = null;
        List<Map<String, AttributeValue>> listOfMaps = new ArrayList<Map<String, AttributeValue>>();
        listOfMaps.add(newImage);
        List<Item> itemList = ItemUtils.toItemList(listOfMaps);
        for (Item item : itemList) {
            json = item.toJSON();
        }
        return json;
    }

    public String getIdFromJsonString(String json) {
        JSONObject jsonObj = new JSONObject(json);
        return String.valueOf(jsonObj.getInt("ID"));
    }

    // Adds a SigV4 signing interceptor to the ES REST client, for Amazon Elasticsearch Service
    // public static RestHighLevelClient esClient(String serviceName, String region) {
    //     AWS4Signer signer = new AWS4Signer();
    //     signer.setServiceName(serviceName);
    //     signer.setRegionName(region);
    //     HttpRequestInterceptor interceptor = new AWSRequestSigningApacheInterceptor(serviceName, signer, credentialsProvider);
    //     return new RestHighLevelClient(RestClient.builder(HttpHost.create(aesEndpoint))
    //             .setHttpClientConfigCallback(hacb -> hacb.addInterceptorLast(interceptor)));
    // }

    public static RestHighLevelClient esClient() {
        String host = "d9bc7cbca5ec49ea96a6ea683f70caca.eastus2.azure.elastic-cloud.com";
        int port = 9200;
        String userName = "elastic";
        String password = "L4Nfnle3wxLmV95lffwsf$Ub46hp";
        String protocol = "https";
        final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password));
        RestClientBuilder builder = RestClient.builder(new HttpHost(host, port, protocol))
                .setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
                        .setDefaultCredentialsProvider(credentialsProvider));
        return new RestHighLevelClient(builder);
    }
}
This is just sample code and has to be modified based on your requirements.
I have been looking for documentation or an example of how to check whether a dynamically created topic exists and, if it does, how to get the subscriber count for that topic.
I use the following code for sending a message to a topic -
jmsTemplate.send(destination, new MessageCreator() {
    @Override
    public Message createMessage(Session session) throws JMSException {
        TextMessage message = session.createTextMessage();
        message.setText(commandStr);
        return message;
    }
});
This code seems to create the topic and publish a message to it. I need to:
1. Check whether the topic exists before creating it.
2. Check whether the topic has any subscribers.
Thanks in advance.
I was able to find the solution to problem (1). (Hope this helps.)
ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory("tcp://localhost:61616");
ActiveMQConnection connection = (ActiveMQConnection) connectionFactory.createConnection();
connection.start();
DestinationSource ds = connection.getDestinationSource();
Set<ActiveMQTopic> topics = ds.getTopics();
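Building on that snippet, a minimal existence check by physical name might look like this ("MY.TOPIC" is a placeholder):

// check whether a topic with the given physical name already exists
boolean exists = false;
for (ActiveMQTopic topic : topics) {
    if ("MY.TOPIC".equals(topic.getPhysicalName())) {
        exists = true;
        break;
    }
}
System.out.println("Topic exists: " + exists);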
To get the destination names, what you did is correct. You can also do it via JMX, specifically to get statistical information like the subscriber count:
import java.util.HashMap;
import java.util.Map;

import javax.management.MBeanServerConnection;
import javax.management.MBeanServerInvocationHandler;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

import org.apache.activemq.broker.jmx.BrokerViewMBean;
import org.apache.activemq.broker.jmx.TopicViewMBean;

public class JMXGetDestinationInfos {

    public static void main(String[] args) throws Exception {
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi");
        Map<String, String[]> env = new HashMap<>();
        String[] creds = { "admin", "admin" };
        env.put(JMXConnector.CREDENTIALS, creds);
        JMXConnector jmxc = JMXConnectorFactory.connect(url, env);
        MBeanServerConnection conn = jmxc.getMBeanServerConnection();
        ObjectName activeMq = new ObjectName("org.apache.activemq:type=Broker,brokerName=localhost");
        BrokerViewMBean mbean = MBeanServerInvocationHandler.newProxyInstance(conn, activeMq, BrokerViewMBean.class,
                true);
        for (ObjectName name : mbean.getTopics()) {
            if ("YOUR_TOPIC_NAME".equals(name.getKeyProperty("destinationName"))) {
                TopicViewMBean topicMbean = MBeanServerInvocationHandler.newProxyInstance(conn, name,
                        TopicViewMBean.class, true);
                System.out.println(topicMbean.getConsumerCount());
            }
        }
    }
}
Or, listing all destinations through the advisory-based DestinationSource:
import java.util.Set;

import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;

import org.apache.activemq.ActiveMQConnection;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.advisory.DestinationSource;
import org.apache.activemq.command.ActiveMQQueue;
import org.apache.activemq.command.ActiveMQTopic;

public class AdvisorySupportGetAllDestinationsNames {

    public static void main(String[] args) throws JMSException {
        Connection conn = null;
        try {
            ConnectionFactory cf = new ActiveMQConnectionFactory("tcp://localhost:61616");
            conn = cf.createConnection();
            conn.start();
            DestinationSource destinationSource = ((ActiveMQConnection) conn).getDestinationSource();
            Set<ActiveMQQueue> queues = destinationSource.getQueues();
            Set<ActiveMQTopic> topics = destinationSource.getTopics();
            System.out.println(queues);
            System.out.println(topics);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (conn != null) {
                try {
                    conn.close();
                } catch (Exception e) {
                }
            }
        }
    }
}
UPDATE
You can use AdvisorySupport.getConsumerAdvisoryTopic()
Note that the consumer start/stop advisory messages also have a
consumerCount header to indicate the number of active consumers on the
destination when the advisory message was sent.
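A minimal sketch of that approach, assuming advisories are enabled on the broker (they are by default) and with "MY.TOPIC" as a placeholder destination; the consumerCount property name is taken from the advisory message description above:

import javax.jms.Connection;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.Session;

import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.advisory.AdvisorySupport;
import org.apache.activemq.command.ActiveMQTopic;

public class ConsumerCountAdvisoryListener {

    public static void main(String[] args) throws Exception {
        Connection conn = new ActiveMQConnectionFactory("tcp://localhost:61616").createConnection();
        conn.start();
        Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
        // advisory topic that carries consumer start/stop events for the watched destination
        ActiveMQTopic watched = new ActiveMQTopic("MY.TOPIC");
        MessageConsumer advisoryConsumer = session.createConsumer(AdvisorySupport.getConsumerAdvisoryTopic(watched));
        advisoryConsumer.setMessageListener((Message msg) -> {
            try {
                // consumerCount reflects the active consumers at the time the advisory was sent
                System.out.println("consumerCount = " + msg.getIntProperty("consumerCount"));
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
    }
}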
I am trying to fetch all the users from an AD server. There are 7000 users on the AD server, but when I run my Java code it returns only 1000 user names. Is there a restriction that it will fetch only a maximum of 1000 users at a time?
Could anyone please tell me how I can fetch all 7000 users.
Here is the Java code -
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;

import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;

public class FetchAllUsers {

    public static void main(String[] args) {
        String username = "username";
        String password = "password";
        String ldapURL = "url";
        String contextFactory = "com.sun.jndi.ldap.LdapCtxFactory";
        String securityAuthentication = "simple";
        fetchUserList(username, password, ldapURL, contextFactory, securityAuthentication);
    }

    public static void fetchUserList(String username, String password, String ldapURL, String contextFactory,
            String securityAuthentication) {
        Hashtable<String, String> env = new Hashtable<String, String>();
        env.put(Context.INITIAL_CONTEXT_FACTORY, contextFactory);
        // set security credentials
        env.put(Context.SECURITY_AUTHENTICATION, securityAuthentication);
        env.put(Context.SECURITY_PRINCIPAL, username);
        env.put(Context.SECURITY_CREDENTIALS, password);
        // connect to my domain controller
        env.put(Context.PROVIDER_URL, ldapURL);
        try {
            List<String> usersList = new ArrayList<String>();
            LdapContext ctx = new InitialLdapContext(env, null);
            SearchControls searchCtls = new SearchControls();
            searchCtls.setSearchScope(SearchControls.SUBTREE_SCOPE);
            // specify the LDAP search filter
            String searchFilter = "(&(objectCategory=user))";
            // specify the base for the search
            String searchBase = "DC=domain,DC=com";
            // initialize counter to total the results
            int totalResults = 0;
            // search for objects using the filter
            NamingEnumeration<SearchResult> fetchData = ctx.search(searchBase, searchFilter, searchCtls);
            // loop through the search results
            while (fetchData.hasMoreElements()) {
                SearchResult sr = (SearchResult) fetchData.next();
                totalResults++;
                String[] names = sr.getName().split(",");
                String[] name = names[0].split("=");
                usersList.add(name[1]);
            }
            System.out.println("Total number of users in AD server : " + totalResults);
            System.out.println(usersList);
        } catch (NamingException e) {
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
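The 1000-entry cap is Active Directory's MaxPageSize limit on a single search, so the usual fix is to page through the results with the LDAP paged-results control (RFC 2696). A minimal sketch of the paging loop, reusing the env, searchBase, searchFilter, and searchCtls from the code above (the surrounding try/catch handles the checked exceptions):

import javax.naming.ldap.Control;
import javax.naming.ldap.PagedResultsControl;
import javax.naming.ldap.PagedResultsResponseControl;

// ... inside fetchUserList, replacing the single ctx.search(...) call:
LdapContext ctx = new InitialLdapContext(env, null);
int pageSize = 1000;
byte[] cookie = null;
ctx.setRequestControls(new Control[] { new PagedResultsControl(pageSize, Control.CRITICAL) });
do {
    NamingEnumeration<SearchResult> results = ctx.search(searchBase, searchFilter, searchCtls);
    while (results.hasMoreElements()) {
        SearchResult sr = results.next();
        // collect the entry exactly as before
    }
    // read back the server's paging cookie; a non-empty cookie means more pages remain
    cookie = null;
    Control[] controls = ctx.getResponseControls();
    if (controls != null) {
        for (Control control : controls) {
            if (control instanceof PagedResultsResponseControl) {
                cookie = ((PagedResultsResponseControl) control).getCookie();
            }
        }
    }
    // send the cookie back to request the next page
    ctx.setRequestControls(new Control[] { new PagedResultsControl(pageSize, cookie, Control.CRITICAL) });
} while (cookie != null && cookie.length > 0);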
I am not able to receive notifications from HCatalog using JMS. I wrote a simple producer/consumer program.
The ActiveMQ service is running in the background. I am able to send simple text messages using ActiveMQ, but markPartitionForEvent() is not able to send the event to the consumer's onMessage() callback.
I referred to the following link:
https://cwiki.apache.org/confluence/display/Hive/HCatalog+Notification
Please guide.
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;

import javax.jms.Connection;
import javax.jms.Destination;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.Session;

import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.PartitionEventType;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hcatalog.common.HCatConstants;

class Consumer implements MessageListener {

    public void start() {
        try {
            ActiveMQConnectionFactory connFac = new ActiveMQConnectionFactory("tcp://localhost:61616");
            Connection conn = connFac.createConnection();
            conn.start();
            HiveConf hiveConf = new HiveConf(Consumer.class);
            HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf);
            Table table = msc.getTable("mydb", "myTbl");
            table.getParameters().put(HCatConstants.HCAT_MSGBUS_TOPIC_NAME, "TOPIC" + ".HCATALOG");
            System.out.println("TABLE = " + table.toString());
            Map<String, String> map = table.getParameters();
            System.out.println("MAP = " + map.toString());
            String fetchTopic = map.get(HCatConstants.HCAT_MSGBUS_TOPIC_NAME);
            System.out.println("fetchTopic = " + fetchTopic);
            String topicName = msc.getTable("mydb", "mytbl").getParameters()
                    .get(HCatConstants.HCAT_MSGBUS_TOPIC_NAME);
            System.out.println("TOPICNAME = " + topicName);
            Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
            if (session == null)
                System.out.println("NULL");
            System.out.println(session.toString());
            Destination hcatTopic = session.createTopic(fetchTopic);
            MessageConsumer consumer = session.createConsumer(hcatTopic);
            consumer.setMessageListener(this);
        } catch (Exception e) {
            System.out.println("ERROR");
            e.printStackTrace();
        }
    }

    @Override
    public void onMessage(Message message) {
        try {
            if (message.getStringProperty(HCatConstants.HCAT_EVENT)
                    .equals(HCatConstants.HCAT_PARTITION_DONE_EVENT)) {
                MapMessage mapMsg = (MapMessage) message;
                Enumeration<String> keys = mapMsg.getMapNames();
                while (keys.hasMoreElements()) {
                    String key = keys.nextElement();
                    System.out.println(key + " : " + mapMsg.getString(key));
                }
                System.out.println("Message: " + message);
            }
        } catch (Exception e) {
            System.out.println("ERROR");
        }
    }
}

class Producer extends Thread {

    private HiveConf hiveConf;

    public Producer() {
    }

    @Override
    public void run() {
        try {
            hiveConf = new HiveConf(this.getClass());
            HiveMetaStoreClient msc = new HiveMetaStoreClient(hiveConf);
            HashMap<String, String> partMap = new HashMap<String, String>();
            // note: the same key is reused, so the map ends up holding only the last value
            partMap.put("date", "20110711");
            partMap.put("date", "20110712");
            partMap.put("date", "20110714");
            while (true) {
                msc.markPartitionForEvent("mydb", "mytbl", partMap, PartitionEventType.LOAD_DONE);
                Thread.sleep(1000);
            }
        } catch (Exception e) {
            System.out.println("ERROR");
        }
    }
}
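For completeness, a minimal sketch of wiring these two classes together (assuming the metastore and broker are reachable); the consumer has to be started and kept alive before the producer begins marking partitions:

public class HCatNotificationDemo {
    public static void main(String[] args) throws Exception {
        // register the JMS listener first so no event is missed
        new Consumer().start();
        // then start firing markPartitionForEvent() calls on a separate thread
        new Producer().start();
        // keep the JVM alive so the asynchronous listener can receive messages
        Thread.currentThread().join();
    }
}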