org.apache.http.client.methods.HttpGet#setConfig() source code examples

Below are example usages of org.apache.http.client.methods.HttpGet#setConfig(), collected from open-source projects; follow the project links on GitHub to view the full source.
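
Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: build a RequestConfig, attach it to a single request with HttpGet#setConfig(), and execute the request. It assumes Apache HttpClient 4.x on the classpath; the class name, URL, and timeout values are placeholders and are not taken from any of the projects below.

import java.io.IOException;

import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class HttpGetSetConfigDemo {

    public static void main(String[] args) throws IOException {
        // Per-request configuration; all timeouts are in milliseconds (illustrative values).
        RequestConfig requestConfig = RequestConfig.custom()
                .setConnectTimeout(5000)            // time to establish the TCP connection
                .setConnectionRequestTimeout(1000)  // time to obtain a connection from the pool
                .setSocketTimeout(5000)             // maximum wait between data packets
                .build();

        HttpGet httpGet = new HttpGet("https://example.com/");  // placeholder URL
        // setConfig() overrides the client-level defaults for this request only.
        httpGet.setConfig(requestConfig);

        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(httpGet)) {
            System.out.println(response.getStatusLine());
            System.out.println(EntityUtils.toString(response.getEntity(), "UTF-8"));
        }
    }
}

Because a RequestConfig set on the request takes precedence over the one configured on the client, setConfig() is the usual way to give an individual call its own timeouts, proxy, or cookie policy, which is what most of the examples below use it for.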

Example 1   Project: xian   File: ApacheHttpClient.java
@Override
public HttpResponse getHttpResponse() throws ConnectTimeoutException, SocketTimeoutException {
    if (client == null) {
        throw new RuntimeException("The HTTP client has not been initialized!");
    }
    HttpGet httpGet = new HttpGet(url);
    httpGet.setProtocolVersion(HttpVersion.HTTP_1_1);
    if (headers != null) {
        for (Map.Entry<String, String> entry : headers.entrySet()) {
            httpGet.setHeader(entry.getKey(), entry.getValue());
        }
    }
    try {
        httpGet.setConfig(requestConfig);
        return client.execute(httpGet);
    } catch (ConnectTimeoutException | SocketTimeoutException connectOrReadTimeout) {
        throw connectOrReadTimeout;
    } catch (Throwable e) {
        throw new RuntimeException(e);
    }
}
 
Example 2   Project: Alice-LiveMan   File: HttpRequestUtil.java
public static HttpResponse getHttpResponse(URI url, String cookies, Map<String, String> requestProperties) throws IOException {
    HttpGet httpGet = new HttpGet(url);
    HttpClientContext context = HttpClientContext.create();
    RequestConfig.Builder builder = RequestConfig.custom();
    builder.setConnectTimeout(30000).setConnectionRequestTimeout(30000).setSocketTimeout(30000).setCookieSpec(CookieSpecs.IGNORE_COOKIES).setRedirectsEnabled(true);
    httpGet.setConfig(builder.build());
    if (StringUtils.isNotBlank(cookies)) {
        httpGet.setHeader("Cookie", cookies);
    }
    httpGet.addHeader("Accept", "*/*");
    httpGet.addHeader("Accept-Encoding", "gzip, deflate");
    httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36");
    if (requestProperties != null) {
        for (Map.Entry<String, String> entry : requestProperties.entrySet()) {
            httpGet.setHeader(entry.getKey(), entry.getValue());
        }
    }
    try {
        return client.execute(httpGet, context);
    } catch (IllegalStateException e) {
        initClient();
        throw e;
    }
}
 
Example 3   Project: clouddisk   File: UserInfoParser.java
@Override
public HttpGet initRequest(final UserInfoParameter userInfoParameter) {
	try {
		final URIBuilder uriBuilder = new URIBuilder(CONST.URI_PATH);
		uriBuilder.addParameter(CONST.SRC_KEY, CONST.SRC_VAL);
		uriBuilder.addParameter(CONST.FROM_KEY, CONST.FROM_VAL);
		uriBuilder.addParameter(CONST.CHARSET_KEY, CONST.CHARSET_VAL);
		uriBuilder.addParameter(CONST.METHOD_KEY, CONST.METHOD_VAL);
		uriBuilder.addParameter(CONST.REQUESTSCEMA_KEY, CONST.REQUESTSCEMA_VAL);
		uriBuilder.addParameter(CONST.O_KEY, CONST.O_VAL);
		uriBuilder.addParameter(CONST.SHOW_NAME_FLAG_NAME, CONST.SHOW_NAME_FLAG_VALUE);
		uriBuilder.addParameter(CONST.HEAD_TYPE_NAME, CONST.HEAD_TYPE_VAL);
		uriBuilder.addParameter("-", TimeUtil.getTimeLenth(13));
		final HttpGet request = new HttpGet(uriBuilder.build());
		request.setConfig(RequestConfig.custom().setCookieSpec(CookieSpecs.NETSCAPE).build());
		return request;
	} catch (URISyntaxException e) {
		LOGGER.error("Error",e);
	}
	return null;
}
 
Example 4   Project: BigDataPlatform   File: UpdaterMain.java
public static String getString(String url) {
    try {
        CloseableHttpClient httpclient = HttpClients.createDefault();
        HttpGet httpGet = new HttpGet(url);
        httpGet.setConfig(requestConfig);
        CloseableHttpResponse response = httpclient.execute(httpGet);
        if (response.getStatusLine().getStatusCode() == 200) {
            byte[] b = IOUtils.toByteArray(response.getEntity().getContent());
            String str = new String(b);
            return str;
        }
        return null;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }

}
 
Example 5   Project: easy_javadoc   File: HttpUtil.java
public static String get(String url) {
    if (StringUtils.isBlank(url)) {
        return null;
    }
    String result = null;
    CloseableHttpClient httpclient = null;
    CloseableHttpResponse response = null;
    try {
        httpclient = HttpClients.createDefault();
        HttpGet httpGet = new HttpGet(url);
        httpGet.setConfig(RequestConfig.custom().setSocketTimeout(SOCKET_TIMEOUT).setConnectTimeout(CONNECT_TIMEOUT).build());
        response = httpclient.execute(httpGet);
        result = EntityUtils.toString(response.getEntity());
    } catch (IOException e) {
        LOGGER.warn("Request to " + url + " failed", e);
    } finally {
        HttpClientUtils.closeQuietly(response);
        HttpClientUtils.closeQuietly(httpclient);
    }
    return result;
}
 
Example 6   Project: bbs   File: HttpClientManage.java
/**
 * Executes a GET request; returns the response body on status 200,
 * null for any other status code.
 *
 * @param url
 * @return
 * @throws IOException
 */
public String doGet(String url) throws IOException {
    // build the HTTP GET request (the shared httpClient field is reused)
    CloseableHttpResponse response = null;
    HttpGet httpGet = new HttpGet(url);
    // apply the request configuration
    httpGet.setConfig(requestConfig);
    try {
        // execute the request
        response = httpClient.execute(httpGet);
        // check whether the status code is 200
        if (response.getStatusLine().getStatusCode() == 200) {
            return EntityUtils.toString(response.getEntity(), "UTF-8");
        }
    } finally {
        if (response != null) {
            response.close();
        }
    }
    return null;
}
 
Example 7   Project: WeEvent   File: CommonService.java
public HttpGet getMethod(String uri, HttpServletRequest request) throws GovernanceException {
    try {
        URIBuilder builder = new URIBuilder(uri);
        Enumeration<String> enumeration = request.getParameterNames();
        while (enumeration.hasMoreElements()) {
            String nex = enumeration.nextElement();
            builder.setParameter(nex, request.getParameter(nex));
        }
        HttpGet httpGet = new HttpGet(builder.build());
        httpGet.setConfig(getRequestConfig());
        return httpGet;
    } catch (URISyntaxException e) {
        log.error("build url method fail,error:{}", e.getMessage());
        throw new GovernanceException(ErrorCode.BUILD_URL_METHOD);
    }
}
 
Example 8   Project: galaxy   File: HttpClientService.java
/**
 * Executes a GET request; returns the response body on status 200, otherwise null.
 *
 * @param url
 * @return
 * @throws IOException
 * @throws ClientProtocolException
 */
public String doGet(String url) throws ClientProtocolException, IOException {
    // create the HTTP GET request
    HttpGet httpGet = new HttpGet(url);
    httpGet.setConfig(this.requestConfig);

    CloseableHttpResponse response = null;
    try {
        // execute the request
        response = httpClient.execute(httpGet);
        // check whether the status code is 200
        if (response.getStatusLine().getStatusCode() == 200) {
            return EntityUtils.toString(response.getEntity(), "UTF-8");
        }
    } finally {
        if (response != null) {
            response.close();
        }
    }
    return null;
}
 
Example 9   Project: frontend-maven-plugin   File: FileDownloader.java
private CloseableHttpResponse executeViaProxy(Proxy proxy, String requestUrl) throws IOException {
    final CloseableHttpClient proxyClient;
    if (proxy.useAuthentication()){
        proxyClient = buildHttpClient(makeCredentialsProvider(proxy.host,proxy.port,proxy.username,proxy.password));
    } else {
        proxyClient = buildHttpClient(null);
    }

    final HttpHost proxyHttpHost = new HttpHost(proxy.host, proxy.port);

    final RequestConfig requestConfig = RequestConfig.custom().setProxy(proxyHttpHost).build();

    final HttpGet request = new HttpGet(requestUrl);
    request.setConfig(requestConfig);

    return proxyClient.execute(request);
}
 
Example 10   Project: tutorials   File: HttpClientLiveTest.java
@Test(expected = ConnectTimeoutException.class)
public final void givenLowTimeout_whenExecutingRequestWithTimeout_thenException() throws IOException {
    final RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(50).setConnectTimeout(50).setSocketTimeout(20).build();
    final HttpGet request = new HttpGet(SAMPLE_URL);
    request.setConfig(requestConfig);
    response = instance.execute(request);
}
 
Example 11   Project: hsac-fitnesse-fixtures   File: HttpClient.java
/**
 * @param url URL of service
 * @param response response to be filled.
 * @param headers http headers to add
 * @param followRedirect whether redirects should be followed automatically
 */
public void get(String url, HttpResponse response, Map<String, Object> headers, boolean followRedirect) {
    HttpGet method = new HttpGet(url);
    if (!followRedirect) {
        RequestConfig r = RequestConfig.copy(RequestConfig.DEFAULT)
                            .setRedirectsEnabled(false)
                            .build();
        method.setConfig(r);
    }
    getResponse(url, response, method, headers);
}
 
Example 12
/**
 * {@inheritDoc}
 */
public List<String> getContexts() throws SparkJobServerClientException {
	List<String> contexts = new ArrayList<String>();
	final CloseableHttpClient httpClient = buildClient();
	try {
		HttpGet getMethod = new HttpGet(jobServerUrl + "contexts");
		getMethod.setConfig(getRequestConfig());
		setAuthorization(getMethod);
		HttpResponse response = httpClient.execute(getMethod);
		int statusCode = response.getStatusLine().getStatusCode();
		String resContent = getResponseContent(response.getEntity());
		if (statusCode == HttpStatus.SC_OK) {
			JSONArray jsonArray = JSONArray.fromObject(resContent);
			Iterator<?> iter = jsonArray.iterator();
			while (iter.hasNext()) {
				contexts.add((String)iter.next());
			}
		} else {
			logError(statusCode, resContent, true);
		}
	} catch (Exception e) {
		processException("Error occurs when trying to get information of contexts:", e);
	} finally {
		close(httpClient);
	}
	return contexts;
}
 
Example 13   Project: netcrusher-java   File: HttpClientTest.java
private HttpUriRequest composeRequest() {
    RequestConfig requestConfig = RequestConfig.custom()
            .setConnectionRequestTimeout(5000)
            .setConnectTimeout(3000)
            .setSocketTimeout(1000)
            .setRedirectsEnabled(true)
            .setCircularRedirectsAllowed(false)
            .setMaxRedirects(3)
            .build();

    HttpGet request = new HttpGet(String.format(RESOURCE, CRUSHER_PORT));
    request.setConfig(requestConfig);

    return request;
}
 
Example 14   Project: sofa-rpc   File: RpcHttpClient.java
public <T> T doGet(String url, Class<T> tClass) throws Throwable {
    long start = System.currentTimeMillis();
    CloseableHttpClient httpClient = getCloseableHttpClient();
    CloseableHttpResponse response = null;
    try {
        HttpGet httpGet = new HttpGet(url);
        RequestConfig requestConfig = parseRequestConfig();
        httpGet.setConfig(requestConfig);
        response = httpClient.execute(httpGet);
        HttpEntity entity = response.getEntity();
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("http client get success. url: {}. latency: {}ms.", url, System.currentTimeMillis() -
                start);
        }
        return JSON.parseObject(EntityUtils.toString(entity), tClass);
    } catch (Throwable throwable) {
        LOGGER.error("http client get error. url: " + url + ". latency: " + (System.currentTimeMillis() - start) +
            "ms.", throwable);
        throw throwable;
    } finally {
        if (response != null) {
            try {
                response.close();
            } catch (IOException e) {
                LOGGER.error("http client get close response error.", e);
            }
        }
    }
}
 
Example 15
/**
 * {@inheritDoc}
 */
public SparkJobConfig getConfig(String jobId) throws SparkJobServerClientException {
	final CloseableHttpClient httpClient = buildClient();
	try {
		if (!isNotEmpty(jobId)) {
			throw new SparkJobServerClientException("The given jobId is null or empty.");
		}
		HttpGet getMethod = new HttpGet(jobServerUrl + "jobs/" + jobId + "/config");
		getMethod.setConfig(getRequestConfig());
		setAuthorization(getMethod);
		HttpResponse response = httpClient.execute(getMethod);
		String resContent = getResponseContent(response.getEntity());
		JSONObject jsonObj = JSONObject.fromObject(resContent);
		SparkJobConfig jobConfg = new SparkJobConfig();
		Iterator<?> keyIter = jsonObj.keys();
		while (keyIter.hasNext()) {
			String key = (String)keyIter.next();
			jobConfg.putConfigItem(key, jsonObj.get(key));
		}
		return jobConfg;
	} catch (Exception e) {
		processException("Error occurs when trying to get information of the target job config:", e);
	} finally {
		close(httpClient);
	}
	return null;
}
 
Example 16   Project: weixin-java-tools   File: WxMaServiceImpl.java
@Override
public String getAccessToken(boolean forceRefresh) throws WxErrorException {
  Lock lock = this.getWxMaConfig().getAccessTokenLock();
  try {
    lock.lock();

    if (this.getWxMaConfig().isAccessTokenExpired() || forceRefresh) {
      String url = String.format(WxMaService.GET_ACCESS_TOKEN_URL, this.getWxMaConfig().getAppid(),
        this.getWxMaConfig().getSecret());
      try {
        HttpGet httpGet = new HttpGet(url);
        if (this.getRequestHttpProxy() != null) {
          RequestConfig config = RequestConfig.custom().setProxy(this.getRequestHttpProxy()).build();
          httpGet.setConfig(config);
        }
        try (CloseableHttpResponse response = getRequestHttpClient().execute(httpGet)) {
          String resultContent = new BasicResponseHandler().handleResponse(response);
          WxError error = WxError.fromJson(resultContent);
          if (error.getErrorCode() != 0) {
            throw new WxErrorException(error);
          }
          WxAccessToken accessToken = WxAccessToken.fromJson(resultContent);
          this.getWxMaConfig().updateAccessToken(accessToken.getAccessToken(),
            accessToken.getExpiresIn());
        } finally {
          httpGet.releaseConnection();
        }
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  } finally {
    lock.unlock();
  }

  return this.getWxMaConfig().getAccessToken();
}
 
Example 17   Project: burp-rest-api   File: BurpClientIT.java
private void sendRequestThruProxy() throws IOException, KeyStoreException, NoSuchAlgorithmException, KeyManagementException {

    SSLContext sslContext;
    sslContext = SSLContexts.custom().loadTrustMaterial((chain, authType) -> true).build();

    SSLConnectionSocketFactory sslConnectionSocketFactory =
            new SSLConnectionSocketFactory(sslContext, new String[]
                    {"SSLv2Hello", "SSLv3", "TLSv1", "TLSv1.1", "TLSv1.2"}, null,
                    NoopHostnameVerifier.INSTANCE);

    try (CloseableHttpClient httpClient = HttpClients.custom()
            .setSSLSocketFactory(sslConnectionSocketFactory)
            .build()) {
        HttpHost target = new HttpHost(BurpClientIT.TARGET_HOST);
        HttpHost proxy = new HttpHost(PROXY_HOST, PROXY_PORT, PROXY_SCHEME);

        RequestConfig config = RequestConfig.custom().setProxy(proxy).build();
        HttpGet request = new HttpGet("/");
        request.setConfig(config);

        log.info("Executing request {} to {} via {} proxy", request.getRequestLine(),
                target.toString(), proxy.toString());

        httpClient.execute(target, request);

    }
}
 
Example 18   Project: BUbiNG   File: HTMLParser.java
public static void main(final String arg[]) throws IllegalArgumentException, IOException, URISyntaxException, JSAPException, NoSuchAlgorithmException {

		final SimpleJSAP jsap = new SimpleJSAP(HTMLParser.class.getName(), "Produce the digest of a page: the page is downloaded or passed as argument by specifying a file",
				new Parameter[] {
					new UnflaggedOption("url", JSAP.STRING_PARSER, JSAP.REQUIRED, "The url of the page."),
					new Switch("crossAuthorityDuplicates", 'c', "cross-authority-duplicates"),
					new FlaggedOption("charBufferSize", JSAP.INTSIZE_PARSER, Integer.toString(CHAR_BUFFER_SIZE), JSAP.NOT_REQUIRED, 'b', "buffer", "The size of the parser character buffer (0 for dynamic sizing)."),
					new FlaggedOption("file", JSAP.STRING_PARSER, JSAP.NO_DEFAULT, JSAP.NOT_REQUIRED, 'f', "file", "The page to be processed."),
					new FlaggedOption("digester", JSAP.STRING_PARSER, "MD5", JSAP.NOT_REQUIRED, 'd', "digester", "The digester to be used.")
			});

		final JSAPResult jsapResult = jsap.parse(arg);
		if (jsap.messagePrinted()) System.exit(1);

		final String url = jsapResult.getString("url");
		final String digester = jsapResult.getString("digester");
		final boolean crossAuthorityDuplicates = jsapResult.userSpecified("crossAuthorityDuplicates");
		final int charBufferSize = jsapResult.getInt("charBufferSize");

		final HTMLParser<Void> htmlParser =  new HTMLParser<>(BinaryParser.forName(digester), (TextProcessor<Void>)null, crossAuthorityDuplicates, charBufferSize);
		final SetLinkReceiver linkReceiver = new SetLinkReceiver();
		final byte[] digest;

		if (!jsapResult.userSpecified("file")) {
			final URI uri = new URI(url);
			final HttpGet request = new HttpGet(uri);
			request.setConfig(RequestConfig.custom().setRedirectsEnabled(false).build());
			digest = htmlParser.parse(uri, HttpClients.createDefault().execute(request), linkReceiver);
		}
		else {
			final String file = jsapResult.getString("file");
			final String content = IOUtils.toString(new InputStreamReader(new FileInputStream(file)));
			digest = htmlParser.parse(BURL.parse(url) , new StringHttpMessages.HttpResponse(content), linkReceiver);
		}

		System.out.println("DigestHexString: " + Hex.encodeHexString(digest));
		System.out.println("Links: " + linkReceiver.urls);

		final Set<String> urlStrings = new ObjectOpenHashSet<>();
		for (final URI link: linkReceiver.urls) urlStrings.add(link.toString());
		if (urlStrings.size() != linkReceiver.urls.size()) System.out.println("There are " + linkReceiver.urls.size() + " URIs but " + urlStrings.size() + " strings");

	}
 
Example 19   Project: ispider   File: HttpUtil.java
/**
 * Downloads the page content for the given URL.
 *
 * @param url
 * @return
 */
public static String getHttpContent(String url) {

    CloseableHttpClient httpClient = null;
    HttpHost proxy = null;
    if (IPProxyRepository.size() > 0) {  // if the IP proxy repository is not empty, use a proxy
        proxy = getRandomProxy();
        httpClient = HttpClients.custom().setProxy(proxy).build();  // create the HttpClient with the proxy
    } else {
        httpClient = HttpClients.custom().build();  // create a plain HttpClient
    }
    HttpGet request = new HttpGet(url); // build the HTTP GET request
    request.setHeader("user-agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0");
    /*
    HttpHost proxy = null;
    CloseableHttpClient httpClient = HttpClients.custom().build();
    HttpGet request = new HttpGet(url); // build the HTTP GET request
    */
    /**
     * Timeout settings:
     * setConnectTimeout: timeout for establishing the connection, in milliseconds.
     * setConnectionRequestTimeout: timeout for obtaining a connection from the connection manager, in milliseconds; this newer setting matters because current versions can share a connection pool.
     * setSocketTimeout: timeout for receiving data, in milliseconds; if the endpoint returns no data within this window, the call is abandoned.
     */
    RequestConfig requestConfig = RequestConfig.custom()
            .setConnectTimeout(5000).setConnectionRequestTimeout(1000)
            .setSocketTimeout(5000).build();
    request.setConfig(requestConfig);
    String host = null;
    Integer port = null;
    if(proxy != null) {
        host = proxy.getHostName();
        port = proxy.getPort();
    }
    try {
        long start = System.currentTimeMillis();    // start time
        CloseableHttpResponse response = httpClient.execute(request);
        long end = System.currentTimeMillis();      // end time
        logger.info("Downloaded page: {}, elapsed: {} ms, proxy: {}", url, end - start, host + ":" + port);
        return EntityUtils.toString(response.getEntity());
    } catch (Exception e) {
        logger.error("Failed to download page: {}, proxy: {}", url, host + ":" + port);
        // if this is a listing URL, push it back onto the high-priority queue
        if (url.contains("list.jd.com") || url.contains("list.suning.com")) {   // hard-coded domains
            String domain = SpiderUtil.getTopDomain(url);   // e.g. jd.com
            retryUrl(url, domain + SpiderConstants.SPIDER_DOMAIN_HIGHER_SUFFIX);    // add the url to jd.com.higher
        }
        /**
         * Why re-add the URL to the high-priority queue?
         * If this URL is the first seed URL and parsing fails, the URL repository stays empty;
         * the crawler keeps running but only keeps reporting that no URL is available, which is
         * pointless. Another reason to retry is that a failed download is most likely caused by:
         *      1. a momentary network outage, or
         *      2. the proxy address being throttled or banned.
         * So re-adding the URL to the high-priority queue and parsing it again is currently the
         * most reasonable way to handle this.
         */
        e.printStackTrace();
    }

    return null;
}
 
Example 20   Project: airsonic   File: VersionService.java
/**
 * Resolves the latest available Airsonic version by inspecting github.
 */
private void readLatestVersion() throws IOException {

    LOG.debug("Starting to read latest version");
    RequestConfig requestConfig = RequestConfig.custom()
            .setConnectTimeout(10000)
            .setSocketTimeout(10000)
            .build();
    HttpGet method = new HttpGet(VERSION_URL + "?v=" + getLocalVersion());
    method.setConfig(requestConfig);
    String content;
    try (CloseableHttpClient client = HttpClients.createDefault()) {
        ResponseHandler<String> responseHandler = new BasicResponseHandler();
        content = client.execute(method, responseHandler);
    } catch (ConnectTimeoutException e) {
        LOG.warn("Got a timeout when trying to reach {}", VERSION_URL);
        return;
    }

    List<String>unsortedTags = new LinkedList<>();
    for (JsonNode item: new ObjectMapper().readTree(content)) {
        unsortedTags.add(item.path("tag_name").asText());
    }

    Function<String, Version> convertToVersion = s -> {
        Matcher match = VERSION_REGEX.matcher(s);
        if (!match.matches()) {
            throw new RuntimeException("Unexpected tag format " + s);
        }
        return new Version(match.group(1));
    };

    Predicate<Version> finalVersionPredicate = version -> !version.isPreview();

    Optional<Version> betaV = unsortedTags.stream().map(convertToVersion).max(Comparator.naturalOrder());
    Optional<Version> finalV = unsortedTags.stream().map(convertToVersion).sorted(Comparator.reverseOrder()).filter(finalVersionPredicate).findFirst();

    LOG.debug("Got {} for beta version", betaV);
    LOG.debug("Got {} for final version", finalV);

    latestBetaVersion = betaV.get();
    latestFinalVersion = finalV.get();
}