There is an example in Asio (the chat server) that caches outgoing messages in a deque. When too many unsent messages pile up in this deque, say 1000, I would like to process them through a ConstBufferSequence, that is, send them as a batch. How should the following code be changed? Thank you!
void deliver(const chat_message& msg)
{
    bool write_in_progress = !write_msgs_.empty();
    write_msgs_.push_back(msg);
    if (!write_in_progress)
    {
        boost::asio::async_write(socket_,
            boost::asio::buffer(write_msgs_.front().data(),
                write_msgs_.front().length()),
            boost::bind(&chat_session::handle_write, shared_from_this(),
                boost::asio::placeholders::error));
    }
}

void handle_write(const boost::system::error_code& error)
{
    if (!error)
    {
        write_msgs_.pop_front();
        if (!write_msgs_.empty())
        {
            boost::asio::async_write(socket_,
                boost::asio::buffer(write_msgs_.front().data(),
                    write_msgs_.front().length()),
                boost::bind(&chat_session::handle_write, shared_from_this(),
                    boost::asio::placeholders::error));
        }
    }
    else
    {
        room_.leave(shared_from_this());
    }
}
You can transform the deque into any container modeling the ConstBufferSequence concept:
std::vector<asio::const_buffer> buffers;
std::transform(
    begin(write_msgs_), end(write_msgs_), back_inserter(buffers),
    [](Message const& s) { return asio::buffer(s); });

async_write( //
    socket_, buffers,
    [this, self = shared_from_this()] //
    (error_code ec, std::size_t bytes_written) {
        // ...
        write_msgs_.clear();
    });
The transform is a force of habit here; you might prefer:
std::vector<asio::const_buffer> buffers;
for (auto& s : write_msgs_)
    buffers.push_back(asio::buffer(s));
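Either way, keep two invariants in mind (a note added here, not spelled out above): the const_buffer elements only reference the strings inside write_msgs_, so the deque must stay untouched until the composed write completes, and async_write completes only once every buffer has been sent (or with an error). A minimal sketch of a completion handler that checks this, assuming the buffers, write_msgs_ and socket_ from the snippets above (the local expected is hypothetical, added just for the check; requires <cassert>):

std::size_t expected = 0;
for (auto const& s : write_msgs_)
    expected += s.size(); // total bytes queued for the gathered write

async_write( //
    socket_, buffers,
    [this, self = shared_from_this(), expected] //
    (error_code ec, std::size_t bytes_written) {
        // async_write is all-or-error: on success every buffer was sent
        assert(ec || bytes_written == expected);
        write_msgs_.clear(); // only safe to release the storage now
    });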
Live Demo
Modified from this recent example, How to safely write to a socket from multiple threads?:
Live On Coliru
#include <boost/asio.hpp>
#include <algorithm> // std::transform
#include <deque>
#include <functional> // std::bind
#include <iostream>
#include <string>
namespace asio = boost::asio;
using boost::system::error_code;
using asio::ip::tcp;

using Message = std::string;

class chat_session : public std::enable_shared_from_this<chat_session> {
  public:
    chat_session(tcp::socket socket) : socket_(std::move(socket)) {}

    void start() { do_read(); }

    void deliver_many(std::vector<Message> msgs) {
        post(socket_.get_executor(),
             [this, msgs = std::move(msgs), self = shared_from_this()] //
             () mutable {
                 for (auto& msg : msgs) {
                     do_write(std::move(msg));
                 }
             });
    }

    void deliver(Message msg) {
        post(socket_.get_executor(),
             [this, msg = std::move(msg), self = shared_from_this()] //
             () mutable { do_write(std::move(msg)); });
    }

  private:
    void do_read() {
        async_read_until(
            socket_, asio::dynamic_buffer(incoming_), '\0',
            [this, self = shared_from_this()] //
            (error_code ec, std::size_t length) {
                if (!ec) {
                    process_message(incoming_.substr(0, length - 1));
                    incoming_.erase(0, length);
                    do_read();
                } else if (ec != asio::error::eof) {
                    std::cerr << "Read error: " << ec.message() << std::endl;
                }
            });
    }

    void do_write(Message message)
    {
        write_msgs_.push_back(std::move(message)); // assumed on (implicit) strand
        if (write_msgs_.size() == 1) {
            write_loop();
        }
    }

    void write_loop() {
        std::cerr << "write_loop with write_msgs_.size() = "
                  << write_msgs_.size() << std::endl;
        if (write_msgs_.empty())
            return;

        if (write_msgs_.size() > 100) {
            // batch: gather all queued messages into one composed write
            std::vector<asio::const_buffer> buffers;
            std::transform(
                begin(write_msgs_), end(write_msgs_), back_inserter(buffers),
                [](Message const& s) { return asio::buffer(s); });

            async_write( //
                socket_, buffers,
                [this, self = shared_from_this()] //
                (error_code ec, std::size_t /*length*/) {
                    if (!ec) {
                        write_msgs_.clear();
                        write_loop();
                    } else if (ec != asio::error::eof) {
                        std::cerr << "Write error: " << ec.message() << std::endl;
                    }
                });
        } else {
            async_write( //
                socket_, asio::buffer(write_msgs_.front()),
                [this, self = shared_from_this()] //
                (error_code ec, std::size_t /*length*/) {
                    if (!ec) {
                        write_msgs_.pop_front();
                        write_loop();
                    } else if (ec != asio::error::eof) {
                        std::cerr << "Write error: " << ec.message() << std::endl;
                    }
                });
        }
    }

    void process_message(Message const& message) {
        std::vector<Message> responses;
        for (int i = 0; i < 200; ++i) {
            responses.push_back("Response #" + std::to_string(i) + " for " +
                                message + "\n");
        }
        // dispatch/post to the executor because we might be on a different
        // thread (not in this example)
        post(socket_.get_executor(),
             std::bind(&chat_session::deliver_many, shared_from_this(),
                       std::move(responses)));
    }

    tcp::socket socket_;
    Message incoming_;
    std::deque<Message> write_msgs_;
};

class server {
  public:
    server(asio::any_io_executor ex, unsigned short port)
        : acceptor_(ex, tcp::endpoint(tcp::v4(), port))
    {
        do_accept();
    }

  private:
    void do_accept()
    {
        acceptor_.async_accept(
            make_strand(acceptor_.get_executor()),
            [this](error_code ec, tcp::socket&& s) {
                if (!ec) {
                    std::cout << "Accepted " << s.remote_endpoint() << std::endl;
                    std::make_shared<chat_session>(std::move(s))->start();
                }
                do_accept();
            });
    }

    tcp::acceptor acceptor_;
};

int main() {
    asio::thread_pool ctx;
    server s(ctx.get_executor(), 8989);
    ctx.join();
}
When sending a single message from a client:
g++ -std=c++20 -O2 -Wall -pedantic -pthread main.cpp
./a.out&
sleep .5; printf 'HelloWorld\0' | nc 127.0.0.1 8989 -w1
shows e.g.:
Accepted 127.0.0.1:39538
write_loop with write_msgs_.size() = 1
Response #0 for HelloWorld
write_loop with write_msgs_.size() = 199
Response #1 for HelloWorld
... (Responses #2 through #198 for HelloWorld elided) ...
Response #199 for HelloWorld
write_loop with write_msgs_.size() = 0
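Note how this demonstrates the batching: the first write_loop runs with a single queued message, so Response #0 goes out on its own; by the time that write completes, the remaining 199 responses have been queued, the queue size exceeds the threshold of 100, and they all go out as one gathered write, leaving the queue empty.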
Related
I have a REST API using AWS API Gateway. The API is handled by a custom Lambda function. I have a /prompts endpoint in my API, for which the Lambda function calls the OpenAI API, sends it the prompt, and streams the result to the user as it is being generated (which can take a few seconds).
I'm able to stream and handle the response from OpenAI's API in my Lambda function.
I would now like to re-stream / pipe that response to the client.
My question is how to do that?
Is there a way to simply pipe the stream being received from Open AI API to my client?
My Lambda function is:
try {
    const res = await openai.createCompletion({
        ...params,
        stream: true,
    }, { responseType: 'stream' });

    res.data.on('data', data => {
        const lines = data.toString().split('\n').filter(line => line.trim() !== '');
        for (const line of lines) {
            const message = line.replace(/^data: /, '');
            if (message === '[DONE]') {
                // store the response to DynamoDB
                storeRecord(content)
                return content
            }
            try {
                const parsed = JSON.parse(message);
                content += parsed.choices[0].text
                // ****** I want to send content to the front-end client... *******
            } catch (error) {
                console.error('Could not JSON parse stream message', message, error);
            }
        }
    });
} catch (error) {
    if (error.response?.status) {
        console.error(error.response.status, error.message);
        error.response.data.on('data', data => {
            const message = data.toString();
            try {
                const parsed = JSON.parse(message);
                console.error('An error occurred during OpenAI request: ', parsed);
            } catch (error) {
                console.error('An error occurred during OpenAI request: ', message);
            }
        });
    } else {
        console.error('An error occurred during OpenAI request', error);
    }
}
I have one file which runs a server on start, and there is one function to upgrade the connection based on some checks of the request.
//A.ts
export abstract class A {
    protected _server: http.Server;
    protected _wss: WebSocket.Server;

    constructor() {
        this._wss = new WebSocket.Server({ noServer: true });
    }

    async start(port: number) {
        await this.startServer(port);
        await this.startUpgradeListener();
        await this.startWsConnectionListener();
    }

    private async startServer(port: number) {
        this._server.listen(port, () => {
            Logger.log(`Server started on port ` + port);
        });
    }

    private async startUpgradeListener() {
        this._server.on("upgrade", async (request: http.IncomingMessage, clientSocket, head: Buffer) => {
            return await this.upgradeHandler(request, clientSocket as Socket, head);
        });
    }

    private async startWsConnectionListener() {
        this._wss.on('connection', async (clientWebSocket: WebSocket, request: http.IncomingMessage) => {
            return await this.connectionHandler(clientWebSocket, request);
        });
    }

    protected abstract upgradeHandler(request: http.IncomingMessage, clientSocket: Socket, head: Buffer): Promise<void>;
    protected abstract connectionHandler(clientWebSocket: WebSocket, request: http.IncomingMessage): void;
}
//B.ts
export class FESProxy extends Proxy {
    private _c = new H();

    protected async upgradeHandler(request: IncomingMessage, clientTcpSocket: Socket, head: Buffer): Promise<void> {
        let resource;
        try {
            this.initHandlers(resource, clientTcpSocket);
            await this.create(resource);
        } catch (e) {
            resource = this.cleanup(clientTcpSocket, resource);
        }
    }

    private async create(resource) {
        resource = await this._c.createResource(resource);
        if (resource.status === 400) {
            const error = new Error("400");
            throw error;
        }
        return true;
    }

    private initHandlers(resource: Resource, clientTcpSocket: Socket) {
        this.initOnClose(clientTcpSocket, resource);
        this.initOnError(clientTcpSocket, resource);
    }

    private initOnClose(clientTcpSocket: Socket, resource: Resource | undefined) {
        clientTcpSocket.on('close', (err) => {
            console.log(`Client TCP Socket Close Handler`);
            resource = this.cleanup(clientTcpSocket, resource);
        });
        return resource;
    }

    private initOnError(clientTcpSocket: Socket, resource: Resource | undefined) {
        clientTcpSocket.on('error', (e) => {
            Logger.error(`Client TCP Socket Error Handler`);
            resource = this.cleanup(clientTcpSocket, resource);
        });
        return resource;
    }
}
// B.spec.ts
let b: B;
let wb: WebSocket;
let workerServer: WebSocketServer;

describe("B test", () => {
    beforeEach(async () => {
        b = new B();
        await b.start(3000);
    })

    afterEach(async () => {
        wb.close();
        workerServer.close();
        b["_server"]?.close(() => {});
    });

    it("Should handle 400 error in create call", async () => {
        let resource = new Resource();
        resource.status = 400;
        const spiedOncreate = await jest.spyOn(b["_c"], "createResource").mockReturnValue(Promise.resolve(resource));
        let uri = 'ws://localhost:3000/ws';
        wb = new WebSocket(uri);
        wb.on('error', (err) => {
            console.log(err);
        })
        wb.on('close', () => {
        })
        wb.on('message', (e) => {
            console.log("Message in the socket ", e);
        })
        expect(spiedOncreate).toBeCalledTimes(1);
    })
})
On running the test, I get this message:
Expected number of calls: 1
Received number of calls: 0
However, I verified that the catch block in the upgradeHandler method of class B was reached with the resource's status code set to 400, which means createResource was called and returned the status 400 that I set in the test.
I also found that the WebSocket's error handler inside the test was triggered, logging this message to the console:
Error: Unexpected server response: 400
I cannot see any other console logs placed after the assertion statement in the test. I suspect the test is crashing on an unhandled error, but I am not sure. I added handlers for the WebSocket in the test, but it still does not work.
What am I missing or doing wrong in asserting on the spy, given that I am not receiving the expected number of calls?
I am building a dApp using @cosmjs.
I want to make a transaction on the Cosmos chain from my dApp running on the Juno chain.
When I tried to get the signing client, I got the following runtime error:
TypeError: Failed to fetch
at http (http://localhost:3000/static/js/bundle.js:53443:12)
at HttpClient.execute (http://localhost:3000/static/js/bundle.js:53471:65)
at Tendermint34Client.detectVersion (http://localhost:3000/static/js/bundle.js:55135:35)
at Tendermint34Client.create (http://localhost:3000/static/js/bundle.js:55128:33)
at Tendermint34Client.connect (http://localhost:3000/static/js/bundle.js:55115:33)
at SigningCosmWasmClient.connectWithSigner (http://localhost:3000/static/js/bundle.js:23403:64)
at http://localhost:3000/static/js/bundle.js:127476:51
at Generator.next (<anonymous>)
at fulfilled (http://localhost:3000/static/js/bundle.js:462877:24)
Here is my code:
import { SigningCosmWasmClient } from "@cosmjs/cosmwasm-stargate";
import { GasPrice } from "@cosmjs/stargate";
.....
const config = {
    chainName: "Cosmos Hub",
    chainId: "cosmoshub-4",
    rpcEndpoint: "https://rpc-cosmoshub.whispernode.com",
    restEndpoint: "",
    faucetEndpoint: "",
    addressPrefix: "cosmos",
    microDenom: "uatom",
    coinDecimals: "6",
    gasPrice: "0.025",
}
.....
await window.keplr?.enable(config.chainId);
const offlineSigner = window.getOfflineSigner?.(
    config.chainId
);
const account = await offlineSigner?.getAccounts();
let wasmChainClient = null;
if (offlineSigner) {
    try {
        wasmChainClient = await SigningCosmWasmClient.connectWithSigner(
            config.rpcEndpoint,
            offlineSigner,
            {
                gasPrice: GasPrice.fromString(
                    `${config.gasPrice}${config.microDenom}`
                ),
            }
        );
    } catch (e) {
        console.error("wallets", e);
    }
}
const result = {
    account: account?.[0],
    client: wasmChainClient,
};
console.log(result)
Is this a problem with the RPC endpoint?
I have tried several other RPC endpoints, but all of them failed.
I really don't know why this happens.
I would be very thankful if anyone could help me solve this issue.
This happens because your browser blocks requests to a different origin. Your origin is http://localhost:3000 and you requested https://rpc-cosmoshub.keplr.app, so the browser blocks the response if it carries no access-control-allow-origin header. You can learn more here: https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS/Errors.
I have a workaround program that adds the necessary headers to the response.
You need Go to run it.
package main

import (
    "flag"
    "fmt"
    "io/ioutil"
    "log"
    "net/http"
)

var hostname string
var port int

func main() {
    // flags declaration using flag package
    flag.StringVar(&hostname, "H", "https://rpc-cosmoshub.keplr.app", "Specify hostname")
    flag.IntVar(&port, "p", 8081, "Specify port")
    flag.Parse() // parse after all flags are declared

    http.HandleFunc("/", serveCorsProxy)
    log.Fatal(http.ListenAndServe(fmt.Sprintf(":%d", port), nil))
}

// Serve a reverse proxy for a given url
func serveCorsProxy(res http.ResponseWriter, req *http.Request) {
    proxyRequest, err := http.NewRequest(req.Method, hostname, req.Body)
    if err != nil { // check the error before using proxyRequest
        fmt.Printf("create request error: %v", err)
        return
    }
    proxyRequest.URL.Path = req.URL.Path
    proxyRequest.URL.RawQuery = req.URL.RawQuery
    response, err := http.DefaultClient.Do(proxyRequest)
    if err != nil {
        fmt.Printf("proxy request error: %v", err)
        return
    }
    setHeaders(response, &res)
    body, err := ioutil.ReadAll(response.Body)
    if err != nil {
        fmt.Printf("response read error: %v", err)
        return
    }
    res.WriteHeader(response.StatusCode)
    _, _ = res.Write(body)
}

func setHeaders(src *http.Response, dest *http.ResponseWriter) {
    header := (*dest).Header()
    for name, values := range (*src).Header {
        for _, value := range values {
            header.Set(name, value)
        }
    }
    header.Set("access-control-allow-headers", "Accept,Cache-Control,Content-Type,DNT,If-Modified-Since,Keep-Alive,Origin,User-Agent,X-Requested-With")
    header.Set("access-control-allow-methods", "GET, POST, OPTIONS")
    header.Set("access-control-allow-origin", "*")
    header.Set("access-control-expose-headers", "Content-Length,Content-Range")
    header.Set("access-control-max-age", "1728000")
}
Save it to a file main.go and run it with go run main.go -H https://rpc-cosmoshub.keplr.app -p 3001.
After that you can access the RPC on localhost:3001.
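With the proxy running, the dApp should talk to it instead of the remote node; assuming the config object from the question above, that means setting rpcEndpoint: "http://localhost:3001".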
I am trying to chain some REST requests using the restbed library and I have an issue.
The workflow is something like this: the frontend sends a GET request to the backend. The backend does some processing and should return a response to the frontend, but at the same time it should also POST the response to another REST server.
void CCMService::get_method_handler(const shared_ptr< Session > session)
{
    const auto request = session->get_request();
    int content_length = request->get_header("Content-Length", 0);

    session->fetch(content_length, [](const shared_ptr< Session > session, const Bytes & body)
    {
        std::vector<std::string> resultImages;
        fprintf(stdout, "%.*s\n", (int)body.size(), body.data());

        const auto request = session->get_request();
        const string parameter = request->get_path_parameter("camGroupId");
        try
        {
            resultImages = prepareImages(parameter.c_str());
        }
        catch (const std::exception& e)
        {
            std::string error = e.what();
            std::string message = "{error: \"" + error + "\"}";
            throw std::runtime_error(message); // std::exception has no string constructor in standard C++
        }

        fprintf(stderr, "Return response\n");
        session->close(OK, resultImages[0], { { "Content-Length", std::to_string(resultImages[0].length()) } });

        fprintf(stderr, "Send tiles to inference\n");
        //send POST request
        sendResult(resultImages[1]);
    });
}

void CCMService::sendResult(const std::string& result) // was char*, but it is called with a std::string
{
    auto request = make_shared< Request >(Uri("http://127.0.0.1:8080/api"));
    request->set_header("Accept", "*/*");
    request->set_header("Content-Type", "application/json");
    request->set_method("POST");
    request->set_header("Host", "http://127.0.0.1:8080");
    //request->set_header("Cache-Control", "no-cache");
    ...
    //create json from result - jsonContent
    ...
    request->set_header("Content-Length", std::to_string(jsonContent.length()));
    request->set_body(jsonContent);

    auto settings = make_shared< Settings >();
    auto response = Http::sync(request, settings);
    print(response);
}
What happens is that when I do the POST request from the sendResult function, it immediately gets an error response and does not wait for the real response.
What am I doing wrong?
Thanks.
I need to download image files from the file system using a RESTEasy web service. The HTTP client input is JSON, and the output response is:
@Produces({"image/jpeg,image/png"})
Here is my client code:
public void getFileDownload(){
    log("inside getServerPath....");
    DefaultHttpClient httpClient = new DefaultHttpClient();
    HttpPost httpPost = new HttpPost(downloadWebService_URL);
    JSONObject json = new JSONObject();
    json.put("filePath", "/ngs/app/sample.png");
    json.put("fileName", "sample.png");
    log("json-->" + json.toString());
    StringEntity inputJson = null;
    try {
        inputJson = new StringEntity(json.toString());
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }
    log("inputJson = " + inputJson.toString());
    inputJson.setContentType("application/json");
    httpPost.setEntity(inputJson);
    httpPost.addHeader("AppType", "TC");
    log("httpPost... httpPost");
    HttpResponse response = null;
    try {
        response = httpClient.execute(httpPost);
        log("response:-->" + response);
    } catch (ClientProtocolException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        log("E:: " + ExceptionUtils.getStackTrace(e));
    }
}
Here is my web service code:
@Path("/downloadservice")
public class DownloadFileWS {
    private static final String FILE_PATH = "/ngs/app/sample.png";

    @POST
    // @Path("/images")
    @Path("/{fileName}/images")
    @Consumes({"application/json"})
    @Produces({"image/jpeg,image/png"})
    public Response getImageFile(@PathParam("fileName") String fileName) {
        File file = new File(FILE_PATH);
        System.out.println("File requested is : " + fileName);
        Logger.getLogger("!!!!!!!!!!!" + FILE_PATH);
        System.out.println("########" + FILE_PATH);
        ResponseBuilder response = Response.ok((Object) file);
        response.header("Content-Disposition", "attachment; filename=\"sample.png\"");
        return response.build();
    }
}
The HTTP Response is:
response:-->HTTP/1.1 200 OK [Date: Tue, 19 Jul 2016 00:36:22 GMT,
Content-Length: 6192, Content-Type: image/png, Content-Disposition:
attachment; filename="sample.png", X-Powered-By: Servlet/2.5 JSP/2.1]
org.apache.http.conn.BasicManagedEntity#2ace1307
Questions:
1. Based on the response, it looks like the service is sending the image in the HttpResponse object. How do I download the image received from the HTTP response?
2. The requirement is to click a link which calls the web service, passing JSON as the input request, and the image should automatically download to the user's local machine via the browser.