JDBC OracleDataSource not releasing connection after executing statement - Java

I am using the setup below for fetching data from an Oracle DB and for connection pooling.
When I call getConnection on this setup, the OracleDataSource always creates and returns a new connection until the max limit is reached, even after I have finished executing the statement. How do I release a connection?
I know I could use Apache DBCP or Oracle UCP, but I want to fix the code below and cannot use any other frameworks.
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import oracle.jdbc.pool.OracleConnectionCacheManager;
import oracle.jdbc.pool.OracleDataSource;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class DatabaseUtility {
private static final String CACHE_NAME = "DBCACHE";
private static OracleDataSource ods = null;
private static final Logger LOGGER = LogManager.getLogger(DatabaseUtility.class);
public static synchronized void init(Properties properties) throws SQLException {
if (ods == null) {
LOGGER.info("OracleDataSource Initialization");
ods = new OracleDataSource();
ods.setURL(PropertiesLoader.getValue(properties, "jdbc.datasource.url"));
ods.setUser(PropertiesLoader.getValue(properties, "jdbc.datasource.username"));
ods.setPassword(PropertiesLoader.getValue(properties, "jdbc.datasource.password"));
ods.setConnectionCachingEnabled(true);
ods.setConnectionCacheName(CACHE_NAME);
Properties cacheProps = new Properties();
cacheProps.setProperty(
"MinLimit", PropertiesLoader.getValue(properties, "jdbc.datasource.minlimit"));
cacheProps.setProperty(
"MaxLimit", PropertiesLoader.getValue(properties, "jdbc.datasource.maxlimit"));
cacheProps.setProperty(
"InitialLimit", PropertiesLoader.getValue(properties, "jdbc.datasource.minlimit"));
cacheProps.setProperty(
"ConnectionWaitTimeout", PropertiesLoader.getValue(properties, "jdbc.datasource.wait"));
cacheProps.setProperty(
"ValidateConnection",
PropertiesLoader.getValue(properties, "jdbc.datasource.validate"));
ods.setConnectionCacheProperties(cacheProps);
}
}
private DatabaseUtility() {
throw new AssertionError("Can not create object of DatabaseUtility!");
}
public static synchronized Connection getConnection(Properties properties) throws SQLException {
LOGGER.info("Request connection!");
if (ods == null) {
init(properties);
}
return ods.getConnection();
}
public static void closePooledConnections() throws SQLException {
if (ods != null) {
ods.close();
}
}
public static void listCacheInfos() throws SQLException {
OracleConnectionCacheManager occm =
OracleConnectionCacheManager.getConnectionCacheManagerInstance();
LOGGER.info(
occm.getNumberOfAvailableConnections(CACHE_NAME)
+ " connections are available in cache "
+ CACHE_NAME);
LOGGER.info(occm.getNumberOfActiveConnections(CACHE_NAME) + " connections are active");
}
}
So, how can I release a connection for reuse after I have finished executing my query?
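With the implicit connection cache configured above, the connection is released back to the cache by calling close() on the Connection obtained from the data source; only closing the data source itself (closePooledConnections) tears down the physical connections. A minimal usage sketch, assuming the DatabaseUtility class above, the usual java.sql imports, and a hypothetical query (the method name and SQL are placeholders), with the close() calls in finally blocks so the connection is always returned:
public static void runQuery(Properties properties) throws SQLException {
    Connection conn = DatabaseUtility.getConnection(properties);
    try {
        PreparedStatement stmt = conn.prepareStatement("SELECT sysdate FROM dual");
        try {
            ResultSet rs = stmt.executeQuery();
            try {
                while (rs.next()) {
                    // process each row here
                }
            } finally {
                rs.close();
            }
        } finally {
            stmt.close();
        }
    } finally {
        // returns the logical connection to the cache; the physical connection stays open for reuse
        conn.close();
    }
}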

Related

How do I test responses to successful and unsuccessful database connections (JDBC)?

Below is the code that I have. I've been trying different ways to test it, including stubbing, mocking, and spying. When I tried mocking DriverManager.getConnection(), I got a message that it's private. I'm trying to practice TDD, so I know the intention is not to test the connection itself but rather the behavior surrounding the connection.
public class Main {
public static void main(String[] args) {
Datasource datasource = new Datasource();
if(datasource.open() == false){
return;
}
datasource.close();
}
}
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
public class Datasource {
public static final String DB_NAME = "DBNAME";
public static final String DB_USERNAME = "USERNAME";
public static final String DB_PASSWORD = "PASSWORD";
public static final String SUBPROTOCOL = "jdbc:oracle:thin:@";
public static final String SERVER_NAME = "SERVERNAME";
public static final String PORT_NUMBER = "1521";
public static final String CONNECTION_STRING = SUBPROTOCOL + SERVER_NAME + ":" + PORT_NUMBER + "/" + DB_NAME;
private Connection conn;
public boolean open(){
try {
conn = DriverManager.getConnection(CONNECTION_STRING, DB_USERNAME, DB_PASSWORD);
System.out.println("Connected to database successfully.");
return true;
} catch (SQLException e) {
System.out.println("Error connecting to database: " + e.getMessage());
return false;
}
}
/**
* Closes the connection to the HR database.
* @return void
*/
public void close() {
try {
if (conn != null) {
conn.close();
System.out.println("Closed database connection successfully.");
}
} catch (SQLException e) {
System.out.println("Error closing database connection: " + e.getMessage());
}
}
}
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.SQLException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Spy;
class DatasourceTest {
@Test
void exceptionIsThrownIfHRDatabaseConnectionFails() throws SQLException {
//Datasource testDatasource = new Datasource();
//assertThrows(SQLException.class, () -> {testDatasource.open();});
//TODO come back to this to mock connection
Datasource testDatasource = mock(Datasource.class);
DriverManager testDriverManager = mock(DriverManager.class);
when(testDriverManager.getConnection(Datasource.CONNECTION_STRING, Datasource.DB_USERNAME, Datasource.DB_PASSWORD)).thenReturn(null);
assertThrows(SQLException.class, () -> {testDatasource.open();});
}
@Test
void exceptionIsThrownIfConnectionIsNullDuringClose() throws SQLException {
Datasource testDatasource = new Datasource();
DriverManager testDriverManager = mock(DriverManager.class);
when(testDriverManager.getConnection(Datasource.CONNECTION_STRING, Datasource.DB_USERNAME, Datasource.DB_PASSWORD)).thenReturn(null);
}
}
Many developers may argue this test does not make sense, but in some cases you may want to test that a connection was successfully closed after using it (e.g. if you find that a bug was happening because your program was exceeding the max number of connections for a given resource, TDD encourages you to add a test for this bugfix). In order to do this:
Design the method interface by adding a test and making it fail (class DatasetTest.java):
@Test
public void whenDatasetClosed_closedReturnsTrue() throws SQLException {
//Arrange
//create a new dataset instance of your Dataset Class
Dataset dataset = new Dataset();
//Act
dataset.close();
//Assert
assertTrue(dataset.isClosed());
}
Make conn an attribute of the Dataset class.
Implement the close() method in the Dataset class.
Add an isClosed() method to the Dataset class exposing the connection status (e.g. dataset.isClosed(), class Dataset.java):
public boolean isClosed() throws SQLException {
return this.conn.isClosed();
}
Repeat for the case where the connection is not closed and isClosed() should return false; a sketch of that test follows.
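For that complementary case, a sketch of what the test could look like (assuming a Dataset class wired up as in the steps above, isClosed() declaring throws SQLException, and a reachable test database for open() to connect to):
@Test
public void whenDatasetOpened_closedReturnsFalse() throws SQLException {
    // Arrange: open a real connection (requires a reachable test database)
    Dataset dataset = new Dataset();
    assertTrue(dataset.open());
    // Assert: a live connection reports that it is not closed
    assertFalse(dataset.isClosed());
    // Clean up so the connection is not leaked
    dataset.close();
}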

Implementing HikariCP in Xpages Javabean breaking

I am implementing a Java bean SQL connection in XPages. I am trying to create a connection pool for this bean implementation using HikariCP.
Here is my code.
Datasource Class
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
public class HikariDS
{
private static DataSource datasource;
public static DataSource getDataSource(String jdbcURL, String user, String pass)
{
if(datasource == null)
{
try
{
HikariConfig config = new HikariConfig();
config.setJdbcUrl(jdbcURL);
config.setUsername(user);
config.setPassword(pass);
config.setDataSourceClassName("com.microsoft.sqlserver.jdbc.SQLServerDriver");
config.setMaximumPoolSize(100);
config.setAutoCommit(false);
config.addDataSourceProperty("cachePrepStmts", "true");
config.addDataSourceProperty("prepStmtCacheSize", "250");
config.addDataSourceProperty("prepStmtCacheSqlLimit", "2048");
datasource = new HikariDataSource(config);
}
catch(Exception e)
{
System.out.println(e.toString());
}
}
return datasource;
}
}
SQL Connection bean
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.io.Serializable;
import com.sqlPoolConnect;
import javax.sql.DataSource;
public class sqlConnection implements Serializable
{
private static final long serialVersionUID = 1L;
private String query;
private String retError;
private String conString;
DataSource dataSource;
Connection con = null;
PreparedStatement prep = null;
private String USER;
private String PASS;
public sqlConnection()
{
}
public String getConString()
{
return conString;
}
public void setConString(String cS)
{
this.conString = cS;
}
public String getUSER()
{
return USER;
}
public void setUSER(String U)
{
this.USER = U;
}
public void setPASS(String P)
{
this.PASS = P;
}
public String getPASS()
{
return PASS;
}
public String getQuery()
{
return query;
}
public void setQuery(String query)
{
this.query = query;
}
public void execQuery()
{
con = null;
prep = null;
try
{
dataSource = sqlPoolConnect.getDataSource(getConString(), getUSER(), getPASS());
con = dataSource.getConnection();
prep = con.prepareStatement(getQuery());
prep.executeQuery();
}
catch(Exception sqlex)
{
sqlex.printStackTrace();
}
finally
{
try
{
if(prep != null)
{
prep.close();
}
con.rollback();
}
catch(SQLException e)
{
e.printStackTrace();
}
}
}
}
SSJS Call of Bean
function testInsert(inCon)
{
var query = "INSERT INTO table_name
(1,2,3,4,5) ";
sqlConnection.setUSER("USER");
sqlConnection.setPass("PASS");
sqlConnection.setConString(inCon);
sqlConnection.setQuery(query);
sqlConnection.execQuery();
}
When the bean is invoked, it breaks at HikariConfig config = new HikariConfig(); in the HikariDS class.
Please advise if anyone knows why it breaks there.
Edit:
I ran the code in a separate Java project and received this error:
Exception in thread "main" java.lang.NoClassDefFoundError: com.zaxxer.hikari.HikariConfig
at HikariCP.getDataSource(HikariCP.java:20)
at HikariCP.main(HikariCP.java:45)
Caused by: java.lang.ClassNotFoundException: com.zaxxer.hikari.HikariConfig
at java.net.URLClassLoader.findClass(URLClassLoader.java:609)
at java.lang.ClassLoader.loadClassHelper(ClassLoader.java:925)
at java.lang.ClassLoader.loadClass(ClassLoader.java:870)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:343)
at java.lang.ClassLoader.loadClass(ClassLoader.java:853)
... 2 more
MANIFEST.MF
Manifest-Version: 1.0
Bnd-LastModified: 1588952278176
Build-Jdk: 11.0.2
Built-By: brettw
Bundle-Description: Ultimate JDBC Connection Pool
Bundle-DocURL: https://github.com/brettwooldridge
Bundle-License: http://www.apache.org/licenses/LICENSE-2.0.txt
Bundle-ManifestVersion: 2
Bundle-Name: HikariCP
Bundle-SymbolicName: com.zaxxer.HikariCP
Bundle-Vendor: Zaxxer.com
Bundle-Version: 3.4.5
Created-By: Apache Maven Bundle Plugin
DynamicImport-Package: *
Export-Package: com.zaxxer.hikari;version="3.4.5";uses:="com.zaxxer.hi
kari.metrics,javax.naming,javax.naming.spi,javax.sql",com.zaxxer.hika
ri.hibernate;version="3.4.5";uses:="com.zaxxer.hikari,org.hibernate,o
rg.hibernate.engine.jdbc.connections.spi,org.hibernate.service.spi",c
om.zaxxer.hikari.metrics;version="3.4.5",com.zaxxer.hikari.pool;versi
on="3.4.5";uses:="com.zaxxer.hikari,com.zaxxer.hikari.metrics,com.zax
xer.hikari.util,javax.sql",com.zaxxer.hikari.util;version="3.4.5";use
s:="javax.sql"
Import-Package: javax.management,javax.naming,javax.naming.spi,javax.s
ql,com.codahale.metrics;resolution:=optional;version="[3.2,4)",com.co
dahale.metrics.health;resolution:=optional;version="[3.2,4)",io.micro
meter.core.instrument;resolution:=optional,org.slf4j;version="[1.6,2)
",org.hibernate;resolution:=optional;version="[5.2,6)",org.hibernate.
engine.jdbc.connections.spi;resolution:=optional;version="[5.2,6)",or
g.hibernate.service;resolution:=optional;version="[5.2,6)",org.hibern
ate.service.spi;resolution:=optional;version="[5.2,6)",javax.sql.rows
et,javax.sql.rowset.serial,javax.sql.rowset.spi,org.hibernate.cfg;res
olution:=optional;version="[5.2,6)"
Multi-Release: true
Originally-Created-By: Apache Maven Bundle Plugin
Require-Capability: osgi.ee;filter:="(&(osgi.ee=JavaSE)(version=1.8))"
Tool: Bnd-4.2.0.201903051501
I have imported HikariCP-3.4.5.jar and slf4j-simple-1.7.30.jar and have ensured they are on my Java build path. What else could be causing this issue?
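One way to narrow this down: a ClassNotFoundException for com.zaxxer.hikari.HikariConfig at runtime usually means the jar is on the compile-time build path but missing from the runtime classpath, or not visible to the classloader that loads the bean (which matters in an XPages/OSGi environment). A small diagnostic sketch (the class name here is hypothetical; run it in the same environment that loads HikariDS):
public class ClasspathCheck {
    public static void main(String[] args) {
        try {
            // succeeds only if HikariCP is visible to this classloader at runtime
            Class.forName("com.zaxxer.hikari.HikariConfig");
            System.out.println("HikariCP found on the runtime classpath");
        } catch (ClassNotFoundException e) {
            System.out.println("HikariCP missing at runtime: " + e.getMessage());
        }
    }
}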

How to allow only a single connection (url/port) to read and write from a Flink application

I read from a URL/port, perform some processing, and write back to the same URL/port. The URL/port allows only a single connection (on which you need to read and write when needed).
Flink can read and write to the URL/port, but it opens 2 connections.
I have used the basic connection to and from a URL/port through Flink:
// set up the streaming execution environment
val env = StreamExecutionEnvironment.getExecutionEnvironment
val data_stream = env.socketTextStream(url, port, socket_stream_deliminator, socket_connection_retries)
.map(x => printInput(x))
.writeToSocket(url, port, new SimpleStringSchema())
//.addSink(new SocketClientSink[String](url, port.toInt, new SimpleStringSchema))
// execute program
env.execute("Flink Streaming Scala API Skeleton")
The ideal (or only) solution for my case is to read and write over the same connection and not create 2 separate connections.
How would I go about doing this?
As I said in the comment, you have to store your connection in some static variable because your sources and sinks won't use the same connection otherwise.
You must also ensure that your source and sink run on the same JVM using the same classloader, otherwise you will still have more than one connection.
I built this wrapper class, which holds a raw socket connection and a reader/writer instance for that connection. Because your source will always stop before your sink (that's how Flink works), this class also reconnects if it was closed before.
package example;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.Socket;
public class SocketConnection implements Closeable {
private final String host;
private final int port;
private final Object lock;
private volatile Socket socket;
private volatile BufferedReader reader;
private volatile PrintStream writer;
public SocketConnection(String host, int port) {
this.host = host;
this.port = port;
this.lock = new Object();
this.socket = null;
this.reader = null;
this.writer = null;
}
private void connect() throws IOException {
this.socket = new Socket(this.host, this.port);
this.reader = new BufferedReader(new InputStreamReader(this.socket.getInputStream()));
this.writer = new PrintStream(this.socket.getOutputStream());
}
private void ensureConnected() throws IOException {
// only acquire lock if null
if (this.socket == null) {
synchronized (this.lock) {
// recheck if socket is still null
if (this.socket == null) {
connect();
}
}
}
}
public BufferedReader getReader() throws IOException {
ensureConnected();
return this.reader;
}
public PrintStream getWriter() throws IOException {
ensureConnected();
return this.writer;
}
@Override
public void close() throws IOException {
if (this.socket != null) {
synchronized (this.lock) {
if (this.socket != null) {
this.reader.close();
this.reader = null;
this.writer.close();
this.writer = null;
this.socket.close();
this.socket = null;
}
}
}
}
}
Your Main Class (or any other class) holds one instance of this class which is then accessed by both your source and your sink:
package example;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
public class Main {
public static final SocketConnection CONNECTION = new SocketConnection("your-host", 12345);
public static void main(String[] args) throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.addSource(new SocketTextStreamSource())
.addSink(new SocketTextStreamSink());
env.execute("Flink Streaming Scala API Skeleton");
}
}
Your SourceFunction could look more or less like this:
package example;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
public class SocketTextStreamSource implements SourceFunction<String> {
private volatile boolean running;
public SocketTextStreamSource() {
this.running = true;
}
@Override
public void run(SourceContext<String> context) throws Exception {
try (SocketConnection conn = Main.CONNECTION) {
String line;
while (this.running && (line = conn.getReader().readLine()) != null) {
context.collect(line);
}
}
}
@Override
public void cancel() {
this.running = false;
}
}
And your SinkFunction:
package example;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
public class SocketTextStreamSink extends RichSinkFunction<String> {
private transient SocketConnection connection;
@Override
public void open(Configuration parameters) throws Exception {
this.connection = Main.CONNECTION;
}
@Override
public void invoke(String value, Context context) throws Exception {
this.connection.getWriter().println(value);
this.connection.getWriter().flush();
}
@Override
public void close() throws Exception {
this.connection.close();
}
}
Note that I always use getReader() and getWriter() because the underlying Socket may have been closed in the meantime.

Database connection suffers from concurrent threads

I've recently started working on a Java webapp (JSP/Servlet) that was developed by a company's internal developer.
This app randomly fails to return data, and inspecting the log I found some NullPointerExceptions related to the class member variable that holds the database connection. Following the stack trace, it seems that a second thread closes the connection after it has finished its own task, leaving the first thread without a connection.
To meet the company's needs, the app uses different databases: one that holds the application data, and others that contain the data the app has to retrieve. So every class attached to the main servlet may connect to one or more databases depending on the task it has to accomplish.
I'm not familiar with Java EE, but looking at the database connection class, I see nothing that protects threads from conflicting with each other.
What is the correct way to handle such connections?
This is the code of the Database handler:
package it.metmi.mmasgis.utils;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class DBManager
{
private String szDatabase;
private String szUsername;
private String szPassword;
private String szError;
private Connection db;
private boolean bConnected;
private Logger logger;
public DBManager(String szDBName)
{
this(szDBName, "", "");
}
public DBManager(String szDBName, String szName, String szPass)
{
szDatabase = szDBName;
szUsername = szName;
szPassword = szPass;
bConnected = false;
szError = "";
logger = LogManager.getFormatterLogger(DBManager.class.getName());
}
public boolean connect()
{
logger.entry();
try {
Class.forName("com.mysql.jdbc.Driver");
if(!szDatabase.isEmpty())
{
String szCon = "jdbc:mysql://localhost/" + szDatabase;
if(!szUsername.isEmpty())
{
szCon += "?user=" + szUsername;
if(!szPassword.isEmpty())
szCon += "&password=" + szPassword;
}
db = DriverManager.getConnection(szCon);
bConnected = true;
} else {
logger.error("No database name!!");
System.exit(0);
}
} catch(SQLException | ClassNotFoundException e) {
szError = e.getMessage();
e.printStackTrace();
logger.error("Can't connect: %s", e);
}
return logger.exit(bConnected);
}
public void disconnect()
{
logger.entry();
try {
db.close();
bConnected = false;
} catch(SQLException e) {
e.printStackTrace();
logger.error("Can't disconnect: %s", e);
}
logger.exit();
}
public boolean isConnected()
{
return bConnected;
}
public String getError()
{
return szError;
}
public ArrayList<HashMap<String,String>> query(String szQuery)
{
logger.entry(szQuery);
ArrayList<HashMap<String,String>> aResults = new ArrayList<HashMap<String,String>>();
int iCols = 0;
try {
Statement stmt = db.createStatement();
logger.info("Query: %s", szQuery);
ResultSet rs = stmt.executeQuery(szQuery);
ResultSetMetaData rsmd = rs.getMetaData();
iCols = rsmd.getColumnCount();
while(rs.next())
{
HashMap<String,String> pv = new HashMap<String,String>();
for(int i = 0; i < iCols; i++)
{
String szCol = rsmd.getColumnLabel(i + 1);
String szVal = rs.getString(i + 1);
pv.put(szCol, szVal);
}
aResults.add(pv);
}
rs.close();
stmt.close();
} catch(SQLException e) {
e.printStackTrace();
szError = e.getMessage();
logger.error("Error executing query: %s", e);
}
return logger.exit(aResults);
}
public boolean update(String szQuery)
{
logger.entry(szQuery);
boolean bResult = false;
try {
Statement stmt = db.createStatement();
logger.info("Query: %s", szQuery);
stmt.executeUpdate(szQuery);
bResult = true;
stmt.close();
} catch(SQLException e) {
e.printStackTrace();
szError = e.getMessage();
bResult = false;
logger.error("Error executing query: %s", e);
}
return logger.exit(bResult);
}
}
The class Task which all the servlet classes are based on, is a simple abstract class:
package it.metmi.mmasgis.servlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
public abstract class Task
{
public abstract void doTask(HttpServletRequest request, HttpServletResponse response);
}
The class that throws NullPointerExceptions is this one, during the invocation of db.disconnect(). This class is called rapidly, 4 or 5 times via AJAX, from the interface written in JS.
package it.metmi.mmasgis.servlet.params;
import it.metmi.mmasgis.servlet.Task;
import it.metmi.mmasgis.utils.Const;
import it.metmi.mmasgis.utils.DBManager;
import it.metmi.mmasgis.utils.Query;
import it.metmi.mmasgis.utils.Utility;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class ClassType extends Task
{
private DBManager db = null;
private Logger logger = LogManager.getFormatterLogger(ClassType.class.getName());
@Override
public void doTask(HttpServletRequest request, HttpServletResponse response)
{
logger.entry(request, response);
String szCensimento = Utility.getParameter(request, "censimento");
String szCategoria = Utility.getParameter(request, "category");
ArrayList<HashMap<String,String>> aClasses = new ArrayList<HashMap<String,String>>();
PrintWriter out = null;
logger.debug("Census: %s", szCensimento);
logger.debug("Category: %s", szCategoria);
db = new DBManager(szCensimento, Const.DB_USER, Const.DB_PASS);
if(db.connect())
{
String szQuery = String.format(Query.classes, szCategoria, szCategoria);
aClasses = db.query(szQuery);
db.disconnect();
}
try {
out = response.getWriter();
jsonEncode(aClasses, out);
} catch(IOException e) {
e.printStackTrace();
logger.error("Failed to encode JSON: %s", e);
}
logger.exit();
}
private void jsonEncode(ArrayList<HashMap<String,String>> aData, PrintWriter out)
{
HashMap<String,Object> result = new HashMap<String,Object>();
result.put("results", aData);
result.put("success", true);
Gson gson = new GsonBuilder().create();
gson.toJson(result, out);
}
}
If the webapp used only one database, it could be rewritten as a singleton, but as it is I have no idea how to handle different connections for different databases.
How can I avoid these exceptions?
The problem was that the connection object was declared as a member variable. Moving the variable inside the methods resolved the issue.
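For illustration, a sketch of that change against the DBManager above: the Connection becomes a local variable, so each call, and therefore each thread, works on its own connection instead of a shared member (assuming a JDBC 4 driver, so no explicit Class.forName is needed):
public ArrayList<HashMap<String, String>> query(String szQuery) {
    logger.entry(szQuery);
    ArrayList<HashMap<String, String>> aResults = new ArrayList<HashMap<String, String>>();
    Connection db = null;
    try {
        // the connection now lives only for the duration of this call
        db = DriverManager.getConnection("jdbc:mysql://localhost/" + szDatabase, szUsername, szPassword);
        Statement stmt = db.createStatement();
        ResultSet rs = stmt.executeQuery(szQuery);
        ResultSetMetaData rsmd = rs.getMetaData();
        int iCols = rsmd.getColumnCount();
        while (rs.next()) {
            HashMap<String, String> pv = new HashMap<String, String>();
            for (int i = 0; i < iCols; i++) {
                pv.put(rsmd.getColumnLabel(i + 1), rs.getString(i + 1));
            }
            aResults.add(pv);
        }
        rs.close();
        stmt.close();
    } catch (SQLException e) {
        szError = e.getMessage();
        logger.error("Error executing query: %s", e);
    } finally {
        // closing here only affects this thread's connection
        if (db != null) {
            try { db.close(); } catch (SQLException ignore) { }
        }
    }
    return logger.exit(aResults);
}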

Java HTTP proxy setting is cached until JVM restart

I'm trying to change the proxy settings for the JVM from my user interface (an Eclipse application running on Java 1.6.0_23):
if (isUseProxy()) {
System.setProperty("java.net.useSystemProxies", "true");
System.setProperty("http.proxyHost", getProxyHost());
System.setProperty("http.proxyPort", getProxyPort());
System.setProperty("https.proxyHost", getProxyHost());
System.setProperty("https.proxyPort", getProxyPort());
..........
} else {
System.clearProperty("http.proxyHost");
System.clearProperty("http.proxyPort");
System.clearProperty("https.proxyHost");
System.clearProperty("https.proxyPort");
}
The problem is that the NEW proxy server value is not used until the JVM restarts; it's cached somewhere in Java.
Java version:
java.runtime.version=1.6.0_26-b03
java.specification.name=Java Platform API Specification
java.specification.vendor=Sun Microsystems Inc.
UPDATE:
The magic continues... I tried isolating the problem to figure out how Java works with the system properties.
It looks like Java ignores the invalid proxy server setting in some random cases. This test fails:
import org.junit.Test;
import java.io.IOException;
import java.net.*;
import static org.junit.Assert.fail;
public class ProxySetTest {
@Test
public void verifyProxyIsNotCachedInJVM() throws IOException {
tryConnectionToGoogleCom();
System.setProperty("http.proxyHost", getInvalidProxyHost());
System.setProperty("http.proxyPort", getInvalidProxyPort()+"");
System.setProperty("https.proxyHost", getInvalidProxyHost());
System.setProperty("https.proxyPort", getInvalidProxyPort()+"");
// Next connection will be through the invalid proxy. must fail?
try {
tryConnectionToGoogleCom();
fail("must have failed with an exception because of invalid proxy setting");
} catch (Exception e) {
System.out.println("received exception: " + e);
}
// clear the proxy setting and try connecting again - must succeed
System.clearProperty("http.proxyHost");
System.clearProperty("http.proxyPort");
System.clearProperty("https.proxyHost");
System.clearProperty("https.proxyPort");
// and without proxy again
tryConnectionToGoogleCom();
}
private void tryConnectionToGoogleCom() throws IOException {
URL url = new URL("http://google.com");
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
}
private int getInvalidProxyPort() {
return 1234;
}
private String getInvalidProxyHost() {
return "asd";
}
}
So I am having the exact same issue and have tracked it down to Axis caching the information.
It is in org.apache.axis.components.net.TransportClientPropertiesFactory. The method in question is:
public static TransportClientProperties create(String protocol)
{
TransportClientProperties tcp =
(TransportClientProperties)cache.get(protocol);
if (tcp == null) {
tcp = (TransportClientProperties)
AxisProperties.newInstance(TransportClientProperties.class,
(Class)defaults.get(protocol));
if (tcp != null) {
cache.put(protocol, tcp);
}
}
return tcp;
}
On the first call, the tcp object gets created with whatever the current JVM proxy settings are. On subsequent calls, it pulls the cached version, so even if you have changed the proxy settings in the JVM, it doesn't matter. I am looking to see if I can find a way to clear the cache.
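If clearing that cache turns out to be the way to go, something along these lines might work via reflection; this is only a sketch and assumes the field shown above is a private static java.util.Map literally named cache, which may differ between Axis versions:
import java.lang.reflect.Field;
import java.util.Map;

public class AxisTransportCacheReset {
    public static void clearTransportClientPropertiesCache() throws Exception {
        // look up the factory class and its (assumed) private static "cache" field
        Class<?> factory = Class.forName(
                "org.apache.axis.components.net.TransportClientPropertiesFactory");
        Field cacheField = factory.getDeclaredField("cache");
        cacheField.setAccessible(true);
        // static field, so the instance argument is null
        Map<?, ?> cache = (Map<?, ?>) cacheField.get(null);
        // after this, the next create(protocol) call rebuilds the entry from the current proxy settings
        cache.clear();
    }
}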
package com.alskor;
import org.junit.Test;
import java.io.IOException;
import java.net.*;
import static org.junit.Assert.fail;
public class ProxySetTest {
@Test
public void verifyProxyIsNotCachedInJVM() throws IOException {
tryConnectionToGoogleCom();
ProxySelector savedSelector = ProxySelector.getDefault();
java.net.ProxySelector.setDefault(new FixedProxySelector(getInvalidProxyHost(), getInvalidProxyPort()));
// Next connection will be through the invalid proxy. must fail?
try {
tryConnectionToGoogleCom();
fail("must have failed with an exception because of invalid proxy setting");
} catch (Exception e) {
System.out.println("received exception: " + e);
}
// clear the proxy setting and try connecting again - must succeed
java.net.ProxySelector.setDefault(savedSelector);
// and without proxy again
tryConnectionToGoogleCom();
}
private void tryConnectionToGoogleCom() throws IOException {
URL url = new URL("http://google.com");
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
}
private int getInvalidProxyPort() {
return 1234;
}
private String getInvalidProxyHost() {
return "asd";
}
}
package com.alskor;
import java.io.IOException;
import java.net.*;
import java.util.ArrayList;
import java.util.List;
public class FixedProxySelector extends ProxySelector {
private final String host;
private final int port;
public FixedProxySelector(String host, int port) {
this.host = host;
this.port = port;
}
@Override
public java.util.List<Proxy> select(URI uri) {
if (uri == null) {
throw new IllegalArgumentException("URI can't be null.");
}
List<Proxy> proxies = new ArrayList<Proxy>();
SocketAddress addr = new InetSocketAddress(host, port);
proxies.add(new Proxy(Proxy.Type.SOCKS, addr));
proxies.add(new Proxy(Proxy.Type.HTTP, addr));
return proxies;
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
if (uri == null || sa == null || ioe == null) {
throw new IllegalArgumentException("Arguments can't be null.");
}
throw new RuntimeException(ioe.toString(), ioe);
}
}
Pass a java.net.Proxy object to the URL.openConnection(Proxy) method.
You could use Proxy.NO_PROXY for a direct connection, or create a new proxy like this:
new Proxy(Proxy.Type.HTTP, new InetSocketAddress("localhost", 8080))
Here is a full sample:
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.URL;
import org.apache.commons.io.IOUtils;
import org.junit.Test;
public class TestConnectOverProxy {
static class Connector {
private Proxy proxy = Proxy.NO_PROXY;
public void setProxy(Proxy proxy) {
this.proxy = proxy;
}
/**
* This method works with java.net.Proxy field of the Connector class
* as described
* here
*/
public String getContent(URL url) throws IOException {
HttpURLConnection conn = (HttpURLConnection) url.openConnection(proxy);
conn.connect();
System.out.println(conn.getRequestMethod());
System.out.println(conn.getResponseCode());
return IOUtils.toString(conn.getInputStream());
}
}
@Test
public void test() throws IOException {
URL url = new URL("http://www.google.com");
Connector connector = new Connector();
//connect directly
connector.setProxy(Proxy.NO_PROXY);
System.out.println(connector.getContent(url));
//connect over proxy
connector.setProxy(new Proxy(Proxy.Type.HTTP, new InetSocketAddress("localhost", 8080)));
System.out.println(connector.getContent(url));
}
}
