Lines Matching full:engine

3  * Handle async block requests by crypto hardware engine.
13 #include <crypto/engine.h>
21 * @engine: the hardware engine
25 static void crypto_finalize_request(struct crypto_engine *engine, in crypto_finalize_request() argument
38 if (!engine->retry_support) { in crypto_finalize_request()
39 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_finalize_request()
40 if (engine->cur_req == req) { in crypto_finalize_request()
42 engine->cur_req = NULL; in crypto_finalize_request()
44 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_finalize_request()
47 if (finalize_req || engine->retry_support) { in crypto_finalize_request()
51 ret = enginectx->op.unprepare_request(engine, req); in crypto_finalize_request()
53 dev_err(engine->dev, "failed to unprepare request\n"); in crypto_finalize_request()
58 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_finalize_request()
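
The finalize path above looks up the transform's engine context and, if an unprepare_request callback is set, calls it before kicking the pump again. The driver-side counterpart is a context structure that embeds struct crypto_engine_ctx as its first member, so the engine can reach the ops through crypto_tfm_ctx(). The following is a minimal sketch, not taken from this file: every my_* name is hypothetical, and it assumes the crypto_engine_ctx/crypto_engine_op interface declared in <crypto/engine.h> alongside this version of the code.

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>

struct my_device;	/* hypothetical per-device state holding the engine pointer */

struct my_cipher_ctx {
	struct crypto_engine_ctx enginectx;	/* must stay first */
	struct my_device *mydev;		/* back-pointer to device state */
	/* ... keys, DMA handles, etc. ... */
};

static int my_prepare_request(struct crypto_engine *engine, void *areq)
{
	/* e.g. map buffers for DMA; return 0 on success, -errno on failure */
	return 0;
}

static int my_unprepare_request(struct crypto_engine *engine, void *areq)
{
	/* undo whatever my_prepare_request() set up */
	return 0;
}

static int my_do_one_request(struct crypto_engine *engine, void *areq);

static int my_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct my_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->enginectx.op.prepare_request = my_prepare_request;
	ctx->enginectx.op.unprepare_request = my_unprepare_request;
	ctx->enginectx.op.do_one_request = my_do_one_request;
	return 0;
}
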
62 * crypto_pump_requests - dequeue one request from engine queue to process
63 * @engine: the hardware engine
66 * This function checks if there is any request in the engine queue that
70 static void crypto_pump_requests(struct crypto_engine *engine, in crypto_pump_requests() argument
79 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
82 if (!engine->retry_support && engine->cur_req) in crypto_pump_requests()
86 if (engine->idling) { in crypto_pump_requests()
87 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_pump_requests()
91 /* Check if the engine queue is idle */ in crypto_pump_requests()
92 if (!crypto_queue_len(&engine->queue) || !engine->running) { in crypto_pump_requests()
93 if (!engine->busy) in crypto_pump_requests()
98 kthread_queue_work(engine->kworker, in crypto_pump_requests()
99 &engine->pump_requests); in crypto_pump_requests()
103 engine->busy = false; in crypto_pump_requests()
104 engine->idling = true; in crypto_pump_requests()
105 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
107 if (engine->unprepare_crypt_hardware && in crypto_pump_requests()
108 engine->unprepare_crypt_hardware(engine)) in crypto_pump_requests()
109 dev_err(engine->dev, "failed to unprepare crypt hardware\n"); in crypto_pump_requests()
111 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
112 engine->idling = false; in crypto_pump_requests()
117 /* Get the first request from the engine queue to handle */ in crypto_pump_requests()
118 backlog = crypto_get_backlog(&engine->queue); in crypto_pump_requests()
119 async_req = crypto_dequeue_request(&engine->queue); in crypto_pump_requests()
128 if (!engine->retry_support) in crypto_pump_requests()
129 engine->cur_req = async_req; in crypto_pump_requests()
134 if (engine->busy) in crypto_pump_requests()
137 engine->busy = true; in crypto_pump_requests()
139 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
142 if (!was_busy && engine->prepare_crypt_hardware) { in crypto_pump_requests()
143 ret = engine->prepare_crypt_hardware(engine); in crypto_pump_requests()
145 dev_err(engine->dev, "failed to prepare crypt hardware\n"); in crypto_pump_requests()
153 ret = enginectx->op.prepare_request(engine, async_req); in crypto_pump_requests()
155 dev_err(engine->dev, "failed to prepare request: %d\n", in crypto_pump_requests()
161 dev_err(engine->dev, "failed to do request\n"); in crypto_pump_requests()
166 ret = enginectx->op.do_one_request(engine, async_req); in crypto_pump_requests()
175 if (!engine->retry_support || in crypto_pump_requests()
177 dev_err(engine->dev, in crypto_pump_requests()
185 * enqueue it back into the crypto-engine queue. in crypto_pump_requests()
188 ret = enginectx->op.unprepare_request(engine, in crypto_pump_requests()
191 dev_err(engine->dev, in crypto_pump_requests()
194 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
197 * back in front of the crypto-engine queue, to keep the order in crypto_pump_requests()
200 crypto_enqueue_request_head(&engine->queue, async_req); in crypto_pump_requests()
202 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_pump_requests()
210 ret = enginectx->op.unprepare_request(engine, async_req); in crypto_pump_requests()
212 dev_err(engine->dev, "failed to unprepare request\n"); in crypto_pump_requests()
219 /* If the retry mechanism is supported, send new requests to the engine */ in crypto_pump_requests()
220 if (engine->retry_support) { in crypto_pump_requests()
221 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
227 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
233 if (engine->do_batch_requests) { in crypto_pump_requests()
234 ret = engine->do_batch_requests(engine); in crypto_pump_requests()
236 dev_err(engine->dev, "failed to do batch requests: %d\n", in crypto_pump_requests()
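
For each dequeued request, the pump calls the transform's prepare_request callback (if set) and then do_one_request. An asynchronous driver usually just programs the hardware from do_one_request and returns 0, completing the request later from its interrupt handler; on engines created with retry support, the error path above re-queues the request at the head of the queue when the driver reports that its hardware queue is full (the full source checks for -ENOSPC; treat that as an assumption here, since that line is not among the matches above). A hedged sketch, continuing the hypothetical my_* names; my_hw_submit() stands in for whatever actually programs the hardware.

static int my_hw_submit(struct my_device *mydev, struct skcipher_request *req);

static int my_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);
	struct my_cipher_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));

	if (!my_hw_submit(ctx->mydev, req))
		return 0;	/* in flight; finalized from the completion IRQ */

	/*
	 * With retry_support, reporting "hardware queue full" here makes
	 * the engine unprepare the request and put it back at the head of
	 * its queue instead of failing it.
	 */
	return -ENOSPC;
}
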
245 struct crypto_engine *engine = in crypto_pump_work() local
248 crypto_pump_requests(engine, true); in crypto_pump_work()
252 * crypto_transfer_request - transfer the new request into the engine queue
253 * @engine: the hardware engine
254 * @req: the request that needs to be listed into the engine queue
256 static int crypto_transfer_request(struct crypto_engine *engine, in crypto_transfer_request() argument
263 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_transfer_request()
265 if (!engine->running) { in crypto_transfer_request()
266 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_transfer_request()
270 ret = crypto_enqueue_request(&engine->queue, req); in crypto_transfer_request()
272 if (!engine->busy && need_pump) in crypto_transfer_request()
273 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_transfer_request()
275 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_transfer_request()
281 * into the engine queue
282 * @engine: the hardware engine
283 * @req: the request that needs to be listed into the engine queue
285 static int crypto_transfer_request_to_engine(struct crypto_engine *engine, in crypto_transfer_request_to_engine() argument
288 return crypto_transfer_request(engine, req, true); in crypto_transfer_request_to_engine()
293 * to list into the engine queue
294 * @engine: the hardware engine
295 * @req: the request that needs to be listed into the engine queue
297 int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine, in crypto_transfer_aead_request_to_engine() argument
300 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_aead_request_to_engine()
306 * to list into the engine queue
307 * @engine: the hardware engine
308 * @req: the request that needs to be listed into the engine queue
310 int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine, in crypto_transfer_akcipher_request_to_engine() argument
313 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_akcipher_request_to_engine()
319 * to list into the engine queue
320 * @engine: the hardware engine
321 * @req: the request that needs to be listed into the engine queue
323 int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine, in crypto_transfer_hash_request_to_engine() argument
326 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_hash_request_to_engine()
332 * to list into the engine queue
333 * @engine: the hardware engine
334 * @req: the request that needs to be listed into the engine queue
336 int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine, in crypto_transfer_skcipher_request_to_engine() argument
339 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_skcipher_request_to_engine()
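
On the driver side, the algorithm entry points normally do not touch the hardware at all; they hand the request to the engine with the matching transfer helper and return the enqueue status (usually -EINPROGRESS, or -EBUSY for a backlogged request) to the caller. A minimal sketch using the hypothetical types from above:

struct my_device {
	struct crypto_engine *engine;
	/* ... registers, clocks, IRQ bookkeeping, ... */
};

static int my_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* queue the request; the engine calls do_one_request later */
	return crypto_transfer_skcipher_request_to_engine(ctx->mydev->engine,
							   req);
}
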
346 * @engine: the hardware engine
350 void crypto_finalize_aead_request(struct crypto_engine *engine, in crypto_finalize_aead_request() argument
353 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_aead_request()
360 * @engine: the hardware engine
364 void crypto_finalize_akcipher_request(struct crypto_engine *engine, in crypto_finalize_akcipher_request() argument
367 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_akcipher_request()
374 * @engine: the hardware engine
378 void crypto_finalize_hash_request(struct crypto_engine *engine, in crypto_finalize_hash_request() argument
381 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_hash_request()
388 * @engine: the hardware engine
392 void crypto_finalize_skcipher_request(struct crypto_engine *engine, in crypto_finalize_skcipher_request() argument
395 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_skcipher_request()
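
When the hardware signals completion, the driver reports the result through the matching finalize helper; the engine then runs unprepare_request (when set), completes the request towards the caller, and pumps the next one. A sketch of a completion path with the same hypothetical names, typically run from an interrupt handler, threaded IRQ, or tasklet:

static void my_handle_completion(struct my_device *mydev,
				 struct skcipher_request *req, int err)
{
	/*
	 * err is 0 on success or a -errno value; the engine propagates it
	 * to the request's completion callback.
	 */
	crypto_finalize_skcipher_request(mydev->engine, req, err);
}
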
400 * crypto_engine_start - start the hardware engine
401 * @engine: the hardware engine that needs to be started
405 int crypto_engine_start(struct crypto_engine *engine) in crypto_engine_start() argument
409 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_engine_start()
411 if (engine->running || engine->busy) { in crypto_engine_start()
412 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_start()
416 engine->running = true; in crypto_engine_start()
417 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_start()
419 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_engine_start()
426 * crypto_engine_stop - stop the hardware engine
427 * @engine: the hardware engine that needs to be stopped
431 int crypto_engine_stop(struct crypto_engine *engine) in crypto_engine_stop() argument
437 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_engine_stop()
440 * If the engine queue is not empty or the engine is in a busy state, in crypto_engine_stop()
441 * we need to wait for a while to pump the requests off the engine queue. in crypto_engine_stop()
443 while ((crypto_queue_len(&engine->queue) || engine->busy) && limit--) { in crypto_engine_stop()
444 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_stop()
446 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_engine_stop()
449 if (crypto_queue_len(&engine->queue) || engine->busy) in crypto_engine_stop()
452 engine->running = false; in crypto_engine_stop()
454 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_stop()
457 dev_warn(engine->dev, "could not stop engine\n"); in crypto_engine_stop()
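
crypto_engine_start() simply marks the engine as running and kicks the pump worker, while crypto_engine_stop() waits a bounded number of iterations for the queue to drain before clearing the running flag, warning if requests remain. One plausible use of the pair outside probe/remove is a suspend/resume path, sketched below with the same hypothetical device structure (assuming drvdata was set to it at probe time); whether this is appropriate depends on the driver and hardware.

static int my_suspend(struct device *dev)
{
	struct my_device *mydev = dev_get_drvdata(dev);

	/* let queued requests drain, then refuse new processing */
	return crypto_engine_stop(mydev->engine);
}

static int my_resume(struct device *dev)
{
	struct my_device *mydev = dev_get_drvdata(dev);

	/* mark the engine running again and re-kick the pump worker */
	return crypto_engine_start(mydev->engine);
}
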
464 * crypto_engine_alloc_init_and_set - allocate crypto hardware engine structure
466 * crypto-engine queue.
467 * @dev: the device attached to the hardware engine
472 * callback(struct crypto_engine *engine)
474 * @engine: the crypto engine structure.
476 * @qlen: maximum size of the crypto-engine queue
479 * Return: the crypto engine structure on success, else NULL.
483 int (*cbk_do_batch)(struct crypto_engine *engine), in crypto_engine_alloc_init_and_set() argument
486 struct crypto_engine *engine; in crypto_engine_alloc_init_and_set() local
491 engine = devm_kzalloc(dev, sizeof(*engine), GFP_KERNEL); in crypto_engine_alloc_init_and_set()
492 if (!engine) in crypto_engine_alloc_init_and_set()
495 engine->dev = dev; in crypto_engine_alloc_init_and_set()
496 engine->rt = rt; in crypto_engine_alloc_init_and_set()
497 engine->running = false; in crypto_engine_alloc_init_and_set()
498 engine->busy = false; in crypto_engine_alloc_init_and_set()
499 engine->idling = false; in crypto_engine_alloc_init_and_set()
500 engine->retry_support = retry_support; in crypto_engine_alloc_init_and_set()
501 engine->priv_data = dev; in crypto_engine_alloc_init_and_set()
506 engine->do_batch_requests = retry_support ? cbk_do_batch : NULL; in crypto_engine_alloc_init_and_set()
508 snprintf(engine->name, sizeof(engine->name), in crypto_engine_alloc_init_and_set()
509 "%s-engine", dev_name(dev)); in crypto_engine_alloc_init_and_set()
511 crypto_init_queue(&engine->queue, qlen); in crypto_engine_alloc_init_and_set()
512 spin_lock_init(&engine->queue_lock); in crypto_engine_alloc_init_and_set()
514 engine->kworker = kthread_create_worker(0, "%s", engine->name); in crypto_engine_alloc_init_and_set()
515 if (IS_ERR(engine->kworker)) { in crypto_engine_alloc_init_and_set()
519 kthread_init_work(&engine->pump_requests, crypto_pump_work); in crypto_engine_alloc_init_and_set()
521 if (engine->rt) { in crypto_engine_alloc_init_and_set()
523 sched_set_fifo(engine->kworker->task); in crypto_engine_alloc_init_and_set()
526 return engine; in crypto_engine_alloc_init_and_set()
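
Putting the pieces together, a driver's probe routine allocates the engine (choosing whether to enable retry support, a batch callback, a realtime kworker, and the queue length), starts it, and only then registers its algorithms. A hedged sketch with hypothetical names; the queue length of 16 and the platform-device framing are arbitrary choices.

#include <linux/platform_device.h>

static int my_probe(struct platform_device *pdev)
{
	struct my_device *mydev;
	int ret;

	mydev = devm_kzalloc(&pdev->dev, sizeof(*mydev), GFP_KERNEL);
	if (!mydev)
		return -ENOMEM;
	platform_set_drvdata(pdev, mydev);

	/* no retry support, no batch callback, normal-priority kworker */
	mydev->engine = crypto_engine_alloc_init_and_set(&pdev->dev, false,
							 NULL, false, 16);
	if (!mydev->engine)
		return -ENOMEM;

	ret = crypto_engine_start(mydev->engine);
	if (ret) {
		crypto_engine_exit(mydev->engine);
		return ret;
	}

	/* ... register skcipher/aead/ahash algorithms here ... */
	return 0;
}
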
531 * crypto_engine_alloc_init - allocate crypto hardware engine structure and
533 * @dev: the device attached to the hardware engine
537 * Return: the crypto engine structure on success, else NULL.
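
For drivers that need none of the extra knobs, this simpler wrapper can be used instead; a one-line sketch reusing the hypothetical probe context from above (the second argument selects a realtime-priority kworker):

	/* default queue length, no retry support, no batch callback */
	mydev->engine = crypto_engine_alloc_init(&pdev->dev, true);
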
547 * crypto_engine_exit - free the resources of the hardware engine on exit
548 * @engine: the hardware engine that needs to be freed
552 int crypto_engine_exit(struct crypto_engine *engine) in crypto_engine_exit() argument
556 ret = crypto_engine_stop(engine); in crypto_engine_exit()
560 kthread_destroy_worker(engine->kworker); in crypto_engine_exit()
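
The teardown helper stops the engine first and then destroys its kworker, so a driver's remove path normally unregisters its algorithms (so no new requests arrive) and then makes a single call. A sketch matching the hypothetical probe above:

static int my_remove(struct platform_device *pdev)
{
	struct my_device *mydev = platform_get_drvdata(pdev);

	/* ... unregister algorithms first ... */
	crypto_engine_exit(mydev->engine);
	return 0;
}
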
567 MODULE_DESCRIPTION("Crypto hardware engine framework");